fix: error of create_instance (#403)
Signed-off-by: rokamu623 <[email protected]>
rokamu623 authored Oct 26, 2023
1 parent 1f08e16 commit 29fe312
Showing 6 changed files with 72 additions and 51 deletions.
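Every hunk below makes the same change: calls to RecordsFactory.create_instance that used to pass their column definitions positionally now pass them through the columns keyword, apparently to match an updated create_instance signature. The following sketch illustrates the two call shapes; the import path and the 'timestamp' column name are illustrative assumptions, not lines taken from this commit.

# Minimal sketch of the calling convention this commit standardizes on.
# Assumption: RecordsFactory and ColumnValue are importable from caret_analyze.record;
# the column name 'timestamp' is hypothetical.
from caret_analyze.record import ColumnValue, RecordsFactory

# Old call shape (removed in this commit): columns passed positionally.
# records = RecordsFactory.create_instance(None, [ColumnValue('timestamp')])

# New call shape (added in this commit): columns passed by keyword.
records = RecordsFactory.create_instance(
    None,                                # no initial row data
    columns=[ColumnValue('timestamp')],  # column definitions given by keyword
)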
41 changes: 22 additions & 19 deletions src/caret_analyze/infra/lttng/records_provider_lttng.py
@@ -1387,15 +1387,15 @@ def tilde_subscribe_records(
if len(grouped_records) == 0:
return RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue(COLUMN_NAME.TILDE_SUBSCRIBE_TIMESTAMP),
ColumnValue(COLUMN_NAME.TILDE_SUBSCRIPTION),
ColumnValue(COLUMN_NAME.TILDE_MESSAGE_ID),
]
)
sample_records = list(grouped_records.values())[0]
column_values = Columns.from_str(sample_records.columns).to_value()
sub_records = RecordsFactory.create_instance(None, column_values)
sub_records = RecordsFactory.create_instance(None, columns=column_values)

if tilde_subscription is not None and tilde_subscription in grouped_records:
sub_records_ = grouped_records[tilde_subscription].clone()
@@ -1435,15 +1435,15 @@ def sub_records(
if len(grouped_records) == 0:
return RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue(COLUMN_NAME.CALLBACK_OBJECT),
ColumnValue(COLUMN_NAME.CALLBACK_START_TIMESTAMP),
ColumnValue(COLUMN_NAME.SOURCE_TIMESTAMP),
]
)
sample_records = list(grouped_records.values())[0]
column_values = Columns.from_str(sample_records.columns).to_value()
sub_records = RecordsFactory.create_instance(None, column_values)
sub_records = RecordsFactory.create_instance(None, columns=column_values)

if inter_callback_object in grouped_records:
sub_records.concat(grouped_records[inter_callback_object].clone())
@@ -1543,17 +1543,20 @@ def intra_comm_records(
grouped_records = self._grouped_intra_comm_records

if len(grouped_records) == 0:
return RecordsFactory.create_instance(None, [
ColumnValue(COLUMN_NAME.CALLBACK_OBJECT),
ColumnValue(COLUMN_NAME.CALLBACK_START_TIMESTAMP),
ColumnValue(COLUMN_NAME.PUBLISHER_HANDLE),
ColumnValue(COLUMN_NAME.RCLCPP_PUBLISH_TIMESTAMP),
ColumnValue(COLUMN_NAME.MESSAGE_TIMESTAMP),
])
return RecordsFactory.create_instance(
None,
columns=[
ColumnValue(COLUMN_NAME.CALLBACK_OBJECT),
ColumnValue(COLUMN_NAME.CALLBACK_START_TIMESTAMP),
ColumnValue(COLUMN_NAME.PUBLISHER_HANDLE),
ColumnValue(COLUMN_NAME.RCLCPP_PUBLISH_TIMESTAMP),
ColumnValue(COLUMN_NAME.MESSAGE_TIMESTAMP),
]
)

sample_records = list(grouped_records.values())[0]
column_values = Columns.from_str(sample_records.columns).to_value()
records = RecordsFactory.create_instance(None, column_values)
records = RecordsFactory.create_instance(None, columns=column_values)

if intra_callback_object is not None:
for publisher_handle in publisher_handles:
@@ -1603,7 +1606,7 @@ def publish_records(
if len(grouped_records) == 0:
return RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue(COLUMN_NAME.PUBLISHER_HANDLE),
ColumnValue(COLUMN_NAME.RCLCPP_PUBLISH_TIMESTAMP),
ColumnValue(COLUMN_NAME.MESSAGE_TIMESTAMP),
@@ -1612,7 +1615,7 @@
)
sample_records = list(grouped_records.values())[0]
column_values = Columns.from_str(sample_records.columns).to_value()
pub_records = RecordsFactory.create_instance(None, column_values)
pub_records = RecordsFactory.create_instance(None, columns=column_values)

for publisher_handle in publisher_handles:
if publisher_handle in grouped_records:
@@ -1649,7 +1652,7 @@ def tilde_publish_records(
if len(grouped_records) == 0:
return RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue(COLUMN_NAME.TILDE_PUBLISH_TIMESTAMP),
ColumnValue(COLUMN_NAME.TILDE_PUBLISHER),
ColumnValue(COLUMN_NAME.TILDE_MESSAGE_ID),
@@ -1658,7 +1661,7 @@
)
sample_records = list(grouped_records.values())[0]
column_values = Columns.from_str(sample_records.columns).to_value()
tilde_records = RecordsFactory.create_instance(None, column_values)
tilde_records = RecordsFactory.create_instance(None, columns=column_values)

for tilde_publisher in tilde_publishers:
if tilde_publisher in grouped_records:
@@ -1706,7 +1709,7 @@ def callback_records(
records = self._grouped_callback_records
callback_records = RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue(COLUMN_NAME.CALLBACK_START_TIMESTAMP),
ColumnValue(COLUMN_NAME.CALLBACK_END_TIMESTAMP),
ColumnValue(COLUMN_NAME.CALLBACK_OBJECT),
@@ -1755,7 +1758,7 @@ def path_beginning_records(
if len(grouped_records) == 0:
return RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue(COLUMN_NAME.CALLBACK_START_TIMESTAMP),
ColumnValue(COLUMN_NAME.RCLCPP_PUBLISH_TIMESTAMP),
ColumnValue(COLUMN_NAME.CALLBACK_OBJECT),
@@ -1764,7 +1767,7 @@
)
sample_records = list(grouped_records.values())[0]
column_values = Columns.from_str(sample_records.columns).to_value()
records = RecordsFactory.create_instance(None, column_values)
records = RecordsFactory.create_instance(None, columns=column_values)

for publisher_handle in publisher_handles:
if publisher_handle in grouped_records:
10 changes: 5 additions & 5 deletions src/caret_analyze/infra/lttng/records_source.py
@@ -211,7 +211,7 @@ def create(self, until_ns: int) -> RecordsInterface:
ColumnValue(COLUMN_NAME.TIMER_EVENT_TIMESTAMP),
]

records = RecordsFactory.create_instance(None, columns)
records = RecordsFactory.create_instance(None, columns=columns)
for control in self._controls:

if isinstance(control, TimerInit):
Expand Down Expand Up @@ -282,7 +282,7 @@ def tilde_subscribe_records(self) -> RecordsInterface:
def intra_callback_records(self) -> RecordsInterface:
intra_proc_subscribe = RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue('callback_start_timestamp'),
ColumnValue('callback_object'),
ColumnValue('is_intra_process'),
@@ -298,7 +298,7 @@ def inter_callback_records(self) -> RecordsInterface:
def inter_callback_records(self) -> RecordsInterface:
inter_proc_subscribe = RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue('tid'),
ColumnValue('callback_start_timestamp'),
ColumnValue('callback_object'),
@@ -374,7 +374,7 @@ def subscribe_records(self) -> RecordsInterface:

inter_proc_subscribe = RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue(COLUMN_NAME.CALLBACK_START_TIMESTAMP),
ColumnValue(COLUMN_NAME.CALLBACK_OBJECT),
ColumnValue(COLUMN_NAME.IS_INTRA_PROCESS),
@@ -570,7 +570,7 @@ def intra_proc_comm_records_iron(self) -> RecordsInterface:
grouped_sub_records = sub_records.groupby(['buffer'])
intra_records = RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue(COLUMN_NAME.TID),
ColumnValue(COLUMN_NAME.PUBLISHER_HANDLE),
ColumnValue(COLUMN_NAME.CALLBACK_OBJECT),
64 changes: 41 additions & 23 deletions src/caret_analyze/infra/lttng/ros2_tracing/data_model.py
@@ -97,50 +97,57 @@ def __init__(self) -> None:

# Events (multiple instances, may not have a meaningful index)
self.callback_start_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('callback_start_timestamp'),
ColumnValue('callback_object'),
ColumnValue('is_intra_process'),
]
)
self.callback_end_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('callback_end_timestamp'),
ColumnValue('callback_object'),
]
)
self.dds_write_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('dds_write_timestamp'),
ColumnValue('message'),
]
)
self.dds_bind_addr_to_stamp = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('dds_bind_addr_to_stamp_timestamp'),
ColumnValue('addr'),
ColumnValue('source_timestamp'),
]
)
self.dds_bind_addr_to_addr = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('dds_bind_addr_to_addr_timestamp'),
ColumnValue('addr_from'),
ColumnValue('addr_to'),
]
)
self.on_data_available_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('on_data_available_timestamp'),
ColumnValue('source_timestamp'),
]
)
self.rclcpp_intra_publish_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('rclcpp_intra_publish_timestamp'),
ColumnValue('publisher_handle'),
@@ -149,7 +156,8 @@ def __init__(self) -> None:
]
)
self.rclcpp_ring_buffer_enqueue_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('rclcpp_ring_buffer_enqueue_timestamp'),
ColumnValue('buffer'),
@@ -159,7 +167,8 @@ def __init__(self) -> None:
]
)
self.rclcpp_ring_buffer_dequeue_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('rclcpp_ring_buffer_dequeue_timestamp'),
ColumnValue('buffer'),
@@ -168,7 +177,8 @@ def __init__(self) -> None:
]
)
self.rclcpp_publish_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('rclcpp_publish_timestamp'),
ColumnValue('publisher_handle'),
@@ -177,71 +187,79 @@ def __init__(self) -> None:
]
)
self.rcl_publish_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('rcl_publish_timestamp'),
ColumnValue('publisher_handle'),
ColumnValue('message'),
]
)
self.dispatch_subscription_callback_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('dispatch_subscription_callback_timestamp'),
ColumnValue('callback_object'),
ColumnValue('message'),
ColumnValue('source_timestamp'),
ColumnValue('message_timestamp'),
])
]
)
self.rmw_take_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tid'),
ColumnValue('rmw_take_timestamp'),
ColumnValue('rmw_subscription_handle'),
ColumnValue('message'),
ColumnValue('source_timestamp')
])
]
)
self.dispatch_intra_process_subscription_callback_instances = \
RecordsFactory.create_instance(
None,
[
columns=[
ColumnValue('dispatch_intra_process_subscription_callback_timestamp'),
ColumnValue('callback_object'),
ColumnValue('message'),
ColumnValue('message_timestamp'),
]
)
self.message_construct_instances = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('message_construct_timestamp'),
ColumnValue('original_message'),
ColumnValue('constructed_message'),
]
)

self.tilde_subscribe = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tilde_subscribe_timestamp'),
ColumnValue('subscription'),
ColumnValue('tilde_message_id'),
]
)

self.tilde_publish = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('tilde_publish_timestamp'),
ColumnValue('publisher'),
ColumnValue('subscription_id'),
ColumnValue('tilde_message_id'),
]
)
self.sim_time = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('system_time'),
ColumnValue('sim_time'),
]
)
self.timer_event = RecordsFactory.create_instance(
None, [
None,
columns=[
ColumnValue('time_event_stamp'),
]
)
2 changes: 1 addition & 1 deletion src/caret_analyze/plot/metrics_base.py
@@ -88,7 +88,7 @@ def _convert_timeseries_records_to_sim_time(
columns: list[ColumnValue] = \
[ColumnValue(_) for _ in records.columns]

converted_records_list.append(RecordsFactory.create_instance(values, columns))
converted_records_list.append(RecordsFactory.create_instance(values, columns=columns))

return converted_records_list
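The same keyword form is used when the records are seeded with data instead of None, as in metrics_base.py here and stacked_bar.py below. A hedged sketch, with hypothetical column names ('stamp', 'latency') and an assumed import path:

# Sketch: create_instance with initial rows plus keyword columns.
# The rows, column names, and import path below are assumptions for illustration.
from caret_analyze.record import ColumnValue, RecordsFactory

rows = [
    {'stamp': 0, 'latency': 5},
    {'stamp': 10, 'latency': 7},
]
records = RecordsFactory.create_instance(
    rows,                                                    # initial data as a list of dicts
    columns=[ColumnValue('stamp'), ColumnValue('latency')],  # still passed by keyword
)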

2 changes: 1 addition & 1 deletion src/caret_analyze/record/records_service/response_time.py
@@ -849,7 +849,7 @@ def _create_empty_records(
columns = columns or [ColumnValue(column) for column in self._columns]
return RecordsFactory.create_instance(
None,
columns
columns=columns
)

def _create_all_pattern_records(self) -> RecordsInterface:
4 changes: 2 additions & 2 deletions src/caret_analyze/record/records_service/stacked_bar.py
@@ -156,7 +156,7 @@ def _get_x_axis_values(
series = records.get_column_series(column)
record_dict = [{xlabel: _} for _ in series]
record: RecordsInterface = \
RecordsFactory.create_instance(record_dict, [ColumnValue(xlabel)])
RecordsFactory.create_instance(record_dict, columns=[ColumnValue(xlabel)])
return record

def _to_stacked_bar_records(
@@ -226,7 +226,7 @@ def _merge_column_series(
if len(records.data) == 0:
new_records: RecordsInterface = \
RecordsFactory.create_instance(
record_dict, [ColumnValue(column)])
record_dict, columns=[ColumnValue(column)])
records.concat(new_records)
else:
records.append_column(ColumnValue(column), series)