52 changes: 52 additions & 0 deletions qiskit_ibm_runtime/utils/circuit_schedule.py
@@ -141,6 +141,7 @@ def preprocess(
filter_awgr: bool = False,
filter_barriers: bool = False,
included_channels: list = None,
merge_common_instructions: bool = False,
) -> None:
"""Preprocess and filter the parsed circuit schedule data for visualization.

@@ -150,6 +151,8 @@ def preprocess(
included_channels: If not ``None``, remove all channels from scheduling data
that are not in the ``included_channels`` list and reorder the plot's
y-axis according to the ``included_channels`` order.
merge_common_instructions: If ``True``, merge instructions of the same type
based on temporal continuity.
"""
# filter channels
if included_channels is not None and isinstance(included_channels, list):
@@ -170,6 +173,10 @@ def preprocess(
mask = self.circuit_scheduling[:, self.type_to_idx["Instruction"]] != BARRIER
self.circuit_scheduling = self.circuit_scheduling[mask]

# merge common consecutive instructions
if merge_common_instructions:
self.merge_common_instructions()

self.circuit_scheduling = self.circuit_scheduling[
np.argsort(self.circuit_scheduling[:, self.type_to_idx["Channel"]])
]
@@ -187,6 +194,51 @@ def preprocess(
self.instruction_set = np.unique(self.circuit_scheduling[:, self.type_to_idx["GateName"]])
self.color_map = dict(zip(self.instruction_set, cycle(colors)))

def merge_common_instructions(self) -> None:
"""Iterate through ``circuit_scheduling`` and merge instructions of the same type based on
temporal continuity.
"""
new_arr = []

t0_idx = self.type_to_idx["Start"]
tf_idx = self.type_to_idx["Finish"]

# find unique instruction groups based on ("Branch", "Instruction", "Channel") information
keys = self.circuit_scheduling[
:, [self.type_to_idx[col_type] for col_type in ["Branch", "Instruction", "Channel"]]
]
_, group_indices = np.unique(keys, axis=0, return_inverse=True)

for g in np.unique(group_indices):
merged_group = []
group = self.circuit_scheduling[group_indices == g]

# nothing to merge if the group contains a single instruction
if len(group) == 1:
new_arr.append(group[0])
continue

# reorder group according to increasing t0
t0_increasing_order = np.argsort(np.array(group[:, t0_idx], dtype=int))
group_increasing = group[t0_increasing_order]

# merge consecutive instructions
merged_group.append(group_increasing[0])
for curr_row in group_increasing[1:]:
prev_row = merged_group.pop()

# check for temporal continuity
if int(curr_row[t0_idx]) == int(prev_row[tf_idx]):
# merge
prev_row[tf_idx] = curr_row[tf_idx]
merged_group.append(prev_row)
else:
merged_group.append(prev_row)
merged_group.append(curr_row)
new_arr.extend(merged_group)

self.circuit_scheduling = np.array(new_arr)

def get_trace_finite_duration_y_shift(self, branch: str) -> tuple[float, float, float]:
"""Return y-axis trace shift for a finite duration instruction schedule and its annotation.
The shifts are to distinguish static and dynamic (control-flow) parts of the circuit.
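For context while reading the diff above, the following is a minimal standalone sketch of the merging idea that ``merge_common_instructions`` applies; the data values and the ``(Branch, Instruction, Channel, Start, Finish)`` column layout are illustrative only and not taken from the PR. Rows are grouped by ``(Branch, Instruction, Channel)``, sorted by start time, and a row whose start equals the previous row's finish is folded into it.

import numpy as np

# Illustrative column layout only: (Branch, Instruction, Channel, Start, Finish).
rows = np.array(
    [
        ["main", "reset_4", "Qubit 4", "7", "71"],
        ["main", "reset_4", "Qubit 4", "71", "179"],
        ["main", "reset_4", "Qubit 4", "300", "308"],
        ["main", "reset_4", "AWGR1_0", "118", "443"],
    ]
)
T0, TF = 3, 4  # start / finish column indices

merged = []
_, group_ids = np.unique(rows[:, :3], axis=0, return_inverse=True)
for g in np.unique(group_ids):
    group = rows[group_ids == g]
    group = group[np.argsort(group[:, T0].astype(int))]
    out = [group[0]]
    for row in group[1:]:
        prev = out[-1]
        if int(row[T0]) == int(prev[TF]):
            prev[TF] = row[TF]  # contiguous: extend the previous interval in place
        else:
            out.append(row)
    merged.extend(out)

print(np.array(merged))  # the two contiguous "Qubit 4" rows collapse into a single 7-179 row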
@@ -28,6 +28,7 @@ def draw_circuit_schedule_timing(
included_channels: list = None,
filter_readout_channels: bool = False,
filter_barriers: bool = False,
merge_common_instructions: bool = False,
width: int = 1400,
) -> PlotlyFigure:
r"""
@@ -40,6 +41,8 @@
and to order the y-axis accordingly.
filter_readout_channels: If ``True``, remove all readout channels.
filter_barriers: If ``True``, remove all barriers.
merge_common_instructions: If ``True``, merge instructions of the same type
based on temporal continuity.
width: The width of the returned figure.

Returns:
@@ -66,6 +69,7 @@
included_channels=included_channels,
filter_awgr=filter_readout_channels,
filter_barriers=filter_barriers,
merge_common_instructions=merge_common_instructions,
)

# Setup the figure
19 changes: 19 additions & 0 deletions release-notes/unreleased/2494.update.rst
@@ -0,0 +1,19 @@
Add a new ``merge_common_instructions`` input argument to :meth:`draw_circuit_schedule_timing` that,
if set to ``True``, merges instructions of the same type based on temporal continuity.

Example:

.. code-block:: python

from qiskit_ibm_runtime.visualization import draw_circuit_schedule_timing


job_result = job.result()
circuit_schedule: str = job_result[0].metadata["compilation"]["scheduler_timing"]["timing"]

fig = draw_circuit_schedule_timing(
circuit_schedule=circuit_schedule,
merge_common_instructions=True
)

fig.show()
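With ``merge_common_instructions=True``, temporally contiguous instructions of the same type on the same channel are merged before plotting, so they should appear as a single span in the resulting figure rather than as several back-to-back ones.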
18 changes: 18 additions & 0 deletions test/unit/mock/fake_circuit_schedule_timing.py
@@ -150,3 +150,21 @@ class FakeCircuitScheduleInputData:
}
},
)

sampler_pub_result_merge = SamplerPubResult(
data=DataBin(),
metadata={
"compilation": {
"scheduler_timing": {
"timing": (
"main,reset_4,Qubit 4,7,64,play\n"
"main,reset_4,Qubit 4,71,108,play\n"
"main,reset_4,AWGR1_0,118,325,capture\n"
"main,reset_4,Qubit 4,179,64,play\n"
"main,reset_4,Qubit 4,243,64,play\n"
"main,reset_4,Qubit 4,577,8,play\n"
)
}
}
},
)
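This mock feeds the new ``test_merge_common_instructions`` case below: preprocessing it without merging leaves 6 instructions, while merging collapses the temporally contiguous ``Qubit 4`` ``play`` entries and leaves 3.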
26 changes: 26 additions & 0 deletions test/unit/test_circuit_schedule.py
@@ -28,12 +28,16 @@ def setUp(self) -> None:
"""Set up."""
fake_sampler_pub_result_large = FakeCircuitScheduleInputData.sampler_pub_result_large
fake_sampler_pub_result_small = FakeCircuitScheduleInputData.sampler_pub_result_small
fake_sampler_pub_result_merge = FakeCircuitScheduleInputData.sampler_pub_result_merge
self.circuit_schedule_large_data = fake_sampler_pub_result_large.metadata["compilation"][
"scheduler_timing"
]["timing"]
self.circuit_schedule_small_data = fake_sampler_pub_result_small.metadata["compilation"][
"scheduler_timing"
]["timing"]
self.circuit_schedule_merge_data = fake_sampler_pub_result_merge.metadata["compilation"][
"scheduler_timing"
]["timing"]

def get_large_mock_data(self):
"""Return the whole data object"""
@@ -43,6 +47,10 @@ def get_small_mock_data(self):
"""Return small constant portion of data object"""
return self.circuit_schedule_small_data

def get_merge_mock_data(self):
"""Return a merge use case data object"""
return self.circuit_schedule_merge_data

def test__load(self):
"""Test data loading"""
data = self.get_small_mock_data()
@@ -110,6 +118,24 @@ def test_preprocess(
if top_channel is not None:
self.assertEqual(circuit_schedule.channels[-1], top_channel)

@ddt.data(
(False, 6),
(True, 3),
)
@ddt.unpack
def test_merge_common_instructions(self, to_merge_instruction, n_instructions):
"""Test for instructions merging"""
data = self.get_merge_mock_data()
circuit_schedule = CircuitSchedule(data)

circuit_schedule.preprocess(
included_channels=None,
filter_awgr=False,
filter_barriers=False,
merge_common_instructions=to_merge_instruction,
)
self.assertEqual(len(circuit_schedule.circuit_scheduling), n_instructions)

def test_get_trace_finite_duration_y_shift(self):
"""Test that x, y, and z shifts for finite duration traces are set correctly"""
branches = ("main", "then", "else")