Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions qiskit_experiments/library/calibration/fine_drag_cal.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,6 @@ def __init__(
auto_update=auto_update,
)

self.set_transpile_options(basis_gates=["sx", schedule_name, "rz"])

@classmethod
def _default_experiment_options(cls) -> Options:
"""Default experiment options.
Expand Down
111 changes: 60 additions & 51 deletions qiskit_experiments/test/fake_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,33 +111,38 @@ def create_experiment(
# backend - the query methods `experiment` and `experiments` are supposed to return an
# an instantiated backend object, and not only the backend name. We assume that the fake
# service works with the fake backend (class FakeBackend).
self.exps = pd.concat(
row = pd.DataFrame(
[
self.exps,
pd.DataFrame(
[
{
"experiment_type": experiment_type,
"experiment_id": experiment_id,
"parent_id": parent_id,
"backend_name": backend_name,
"metadata": metadata,
"job_ids": job_ids,
"tags": tags,
"notes": notes,
"share_level": kwargs.get("share_level", None),
"device_components": [],
"start_datetime": datetime(2022, 1, 1)
+ timedelta(hours=len(self.exps)),
"figure_names": [],
"backend": FakeBackend(backend_name=backend_name),
}
],
columns=self.exps.columns,
),
{
"experiment_type": experiment_type,
"experiment_id": experiment_id,
"parent_id": parent_id,
"backend_name": backend_name,
"metadata": metadata,
"job_ids": job_ids,
"tags": tags,
"notes": notes,
"share_level": kwargs.get("share_level", None),
"device_components": [],
"start_datetime": datetime(2022, 1, 1) + timedelta(hours=len(self.exps)),
"figure_names": [],
"backend": FakeBackend(backend_name=backend_name),
}
],
ignore_index=True,
columns=self.exps.columns,
)
if len(self.exps) > 0:
self.exps = pd.concat(
[
self.exps,
row,
],
ignore_index=True,
)
else:
# Avoid the FutureWarning on concatenating empty DataFrames
# introduced in https://github.com/pandas-dev/pandas/pull/52532
self.exps = row

return experiment_id

Expand Down Expand Up @@ -293,35 +298,39 @@ def create_analysis_result(
# `IBMExperimentService.create_analysis_result`. Since `DbExperimentData` does not set it
# via kwargs (as it does with chisq), the user cannot control the time and the service
# alone decides about it. Here we've chosen to set the start date of the experiment.
self.results = pd.concat(
row = pd.DataFrame(
[
self.results,
pd.DataFrame(
[
{
"result_data": result_data,
"result_id": result_id,
"result_type": result_type,
"device_components": device_components,
"experiment_id": experiment_id,
"quality": quality,
"verified": verified,
"tags": tags,
"backend_name": self.exps.loc[self.exps.experiment_id == experiment_id]
.iloc[0]
.backend_name,
"chisq": kwargs.get("chisq", None),
"creation_datetime": self.exps.loc[
self.exps.experiment_id == experiment_id
]
.iloc[0]
.start_datetime,
}
]
),
],
ignore_index=True,
{
"result_data": result_data,
"result_id": result_id,
"result_type": result_type,
"device_components": device_components,
"experiment_id": experiment_id,
"quality": quality,
"verified": verified,
"tags": tags,
"backend_name": self.exps.loc[self.exps.experiment_id == experiment_id]
.iloc[0]
.backend_name,
"chisq": kwargs.get("chisq", None),
"creation_datetime": self.exps.loc[self.exps.experiment_id == experiment_id]
.iloc[0]
.start_datetime,
}
]
)
if len(self.results) > 0:
self.results = pd.concat(
[
self.results,
row,
],
ignore_index=True,
)
else:
# Avoid the FutureWarning on concatenating empty DataFrames
# introduced in https://github.com/pandas-dev/pandas/pull/52532
self.results = row

# a helper method for updating the experiment's device components, see usage below
def add_new_components(expcomps):
Expand Down
2 changes: 1 addition & 1 deletion qiskit_experiments/test/pulse_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def __init__(
None,
name="PulseBackendV2",
description="A PulseBackend simulator",
online_date=datetime.datetime.utcnow(),
online_date=datetime.datetime.now(datetime.timezone.utc),
backend_version="0.0.1",
)

Expand Down
41 changes: 25 additions & 16 deletions qiskit_experiments/visualization/drawers/mpl_drawer.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,6 +147,31 @@ def format_canvas(self):
else:
all_axes = [self._axis]

# Set axes scale. This needs to be done before anything tries to work with
# the axis limits because if no limits or data are set explicitly the
# default limits depend on the scale method (for example, the minimum
# value is 0 for linear scaling but not for log scaling).
def signed_sqrt(x):
return np.sign(x) * np.sqrt(abs(x))

def signed_square(x):
return np.sign(x) * x**2

for ax_type in ("x", "y"):
for sub_ax in all_axes:
scale = self.figure_options.get(f"{ax_type}scale")
if ax_type == "x":
mpl_setscale = sub_ax.set_xscale
else:
mpl_setscale = sub_ax.set_yscale

# Apply non linear axis spacing
if scale is not None:
if scale == "quadratic":
mpl_setscale("function", functions=(signed_square, signed_sqrt))
else:
mpl_setscale(scale)

# Get axis formatter from drawing options
formatter_opts = {}
for ax_type in ("x", "y"):
Expand Down Expand Up @@ -181,12 +206,6 @@ def format_canvas(self):
"max_ax_vals": max_vals,
}

def signed_sqrt(x):
return np.sign(x) * np.sqrt(abs(x))

def signed_square(x):
return np.sign(x) * x**2

for i, sub_ax in enumerate(all_axes):
# Add data labels if there are multiple labels registered per sub_ax.
_, labels = sub_ax.get_legend_handles_labels()
Expand All @@ -197,18 +216,15 @@ def signed_square(x):
limit = formatter_opts[ax_type]["limit"][i]
unit = formatter_opts[ax_type]["unit"][i]
unit_scale = formatter_opts[ax_type]["unit_scale"][i]
scale = self.figure_options.get(f"{ax_type}scale")
min_ax_vals = formatter_opts[ax_type]["min_ax_vals"]
max_ax_vals = formatter_opts[ax_type]["max_ax_vals"]
share_axis = self.figure_options.get(f"share{ax_type}")

if ax_type == "x":
mpl_setscale = sub_ax.set_xscale
mpl_axis_obj = getattr(sub_ax, "xaxis")
mpl_setlimit = sub_ax.set_xlim
mpl_share = sub_ax.sharex
else:
mpl_setscale = sub_ax.set_yscale
mpl_axis_obj = getattr(sub_ax, "yaxis")
mpl_setlimit = sub_ax.set_ylim
mpl_share = sub_ax.sharey
Expand All @@ -219,13 +235,6 @@ def signed_square(x):
else:
limit = min_ax_vals[i], max_ax_vals[i]

# Apply non linear axis spacing
if scale is not None:
if scale == "quadratic":
mpl_setscale("function", functions=(signed_square, signed_sqrt))
else:
mpl_setscale(scale)

# Create formatter for axis tick label notation
if unit and unit_scale:
# If value is specified, automatically scale axis magnitude
Expand Down
20 changes: 20 additions & 0 deletions test/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,26 @@ def setUpClass(cls):
super().setUpClass()

warnings.filterwarnings("error", category=DeprecationWarning)
# Tests should not generate any warnings unless testing those
# warnings. In that case, the test should catch the warning with
# assertWarns or warnings.catch_warnings.
warnings.filterwarnings("error", module="qiskit_experiments")
# Ideally, changes introducing pending deprecations should include
# alternative code paths and not need to generate warnings in the
# tests, but this exception is necessary until the use of the
# deprecated ScatterTable methods is removed.
warnings.filterwarnings(
"default",
module="qiskit_experiments",
message=".*Curve data uses dataframe representation.*",
category=PendingDeprecationWarning,
)
warnings.filterwarnings(
"default",
module="qiskit_experiments",
message=".*The curve data representation is replaced with dataframe format.*",
category=PendingDeprecationWarning,
)

# Some functionality may be deprecated in Qiskit Experiments. If
# the deprecation warnings aren't filtered, the tests will fail as
Expand Down
Loading