Update T1 experiment #6487
Changes from all commits
```diff
@@ -53,15 +53,15 @@ def noisy_moment(self, moment, system_qubits):
         repetitions=10,
         max_delay=cirq.Duration(nanos=500),
     )
-    results.plot()
+    results.plot(include_fit=True)


 def test_result_eq():
     eq = cirq.testing.EqualsTester()
     eq.make_equality_group(
         lambda: cirq.experiments.T1DecayResult(
             data=pd.DataFrame(
-                columns=['delay_ns', 'false_count', 'true_count'], index=[0], data=[[100.0, 2, 8]]
+                columns=['delay_ns', 'false_count', 'true_count'], index=[0], data=[[100, 2, 8]]
             )
         )
     )
```
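The `include_fit=True` argument above is the plotting option this PR switches the tests to. A rough standalone sketch of that call path, with made-up counts (this is not code from the PR, just an illustration of constructing a `T1DecayResult` from a DataFrame and plotting it with the fit overlaid):

```python
import pandas as pd
import cirq

# Build a T1DecayResult directly from (made-up) counts and plot it with the
# fitted decay curve overlaid via include_fit=True.
result = cirq.experiments.T1DecayResult(
    data=pd.DataFrame(
        columns=['delay_ns', 'false_count', 'true_count'],
        index=range(4),
        data=[[100.0, 2, 8], [215.0, 5, 5], [464.0, 8, 2], [1000.0, 10, 0]],
    )
)
result.plot(include_fit=True)  # draws the counts and the fitted curve
```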
```diff
@@ -103,7 +103,7 @@ def noisy_moment(self, moment, system_qubits):
         data=pd.DataFrame(
             columns=['delay_ns', 'false_count', 'true_count'],
             index=range(4),
-            data=[[100.0, 0, 10], [400.0, 0, 10], [700.0, 10, 0], [1000.0, 10, 0]],
+            data=[[100.0, 0, 10], [215.0, 0, 10], [464.0, 0, 10], [1000.0, 10, 0]],
         )
     )
```
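The expected `delay_ns` values here move from linear spacing (100, 400, 700, 1000 ns) to what looks like geometric spacing over the same range. A quick numpy check of that reading (the geometric-spacing assumption is mine, not stated in this diff):

```python
import numpy as np

# Old expected delays: linear spacing between 100 ns and 1000 ns.
print(np.linspace(100, 1000, 4))   # [ 100.  400.  700. 1000.]

# New expected delays: geometric (log-uniform) spacing over the same range.
print(np.geomspace(100, 1000, 4))  # [ 100.   215.44   464.16  1000. ] (rounded)
```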
```diff
@@ -117,13 +117,14 @@ def test_all_on_results():
         min_delay=cirq.Duration(nanos=100),
         max_delay=cirq.Duration(micros=1),
     )
-    assert results == cirq.experiments.T1DecayResult(
+    desired = cirq.experiments.T1DecayResult(
         data=pd.DataFrame(
```
**Collaborator:** can you change this line to … so that we get a more informative error message?

**Author:** I tried this, and it just outputs …

**Author:** I suspect it may have something to do with data types, since I changed line 83 in …

**Collaborator:** looks like it's indeed a types issue, specifically for the `delay_ns` column:

```
>>> results.data.dtypes
delay_ns       float64
false_count      int64
true_count       int64
dtype: object
>>> desired.data.dtypes
delay_ns         int64
false_count      int64
true_count       int64
dtype: object
```

**Author:** That's pretty strange. When I run it on linux, …

**Collaborator:** the result is computed by … Why do you want to force …?

**Author:** I changed it back to …
```diff
             columns=['delay_ns', 'false_count', 'true_count'],
             index=range(4),
-            data=[[100.0, 0, 10], [400.0, 0, 10], [700.0, 0, 10], [1000.0, 0, 10]],
+            data=[[100.0, 0, 10], [215.0, 0, 10], [464.0, 0, 10], [1000.0, 0, 10]],
         )
     )
+    assert results == desired, f'{results.data=} {desired.data=}'


 def test_all_off_results():
```
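The review thread above comes down to the dtype of the `delay_ns` column. A self-contained pandas sketch of that failure mode, separate from the cirq comparison code itself: two frames with identical values compare unequal when one `delay_ns` column is float64 and the other int64, and `pandas.testing.assert_frame_equal` can either report or ignore exactly that difference. The `f'{results.data=} {desired.data=}'` message added in this hunk is the lighter-weight fix: the failed assertion then prints both frames.

```python
import pandas as pd

cols = ['delay_ns', 'false_count', 'true_count']

# Float literals give a float64 delay_ns column (like the computed results)...
computed = pd.DataFrame(columns=cols, index=range(2), data=[[100.0, 0, 10], [1000.0, 10, 0]])
# ...while integer literals give int64 (like a hand-written expectation).
expected = pd.DataFrame(columns=cols, index=range(2), data=[[100, 0, 10], [1000, 10, 0]])

print(computed.equals(expected))  # False: same values, different delay_ns dtype

# assert_frame_equal names the mismatched column in its error message,
# or skips the dtype check entirely when check_dtype=False.
pd.testing.assert_frame_equal(computed, expected, check_dtype=False)
```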
```diff
@@ -135,13 +136,14 @@ def test_all_off_results():
         min_delay=cirq.Duration(nanos=100),
         max_delay=cirq.Duration(micros=1),
     )
-    assert results == cirq.experiments.T1DecayResult(
+    desired = cirq.experiments.T1DecayResult(
         data=pd.DataFrame(
```
**Collaborator:** same as above.
```diff
             columns=['delay_ns', 'false_count', 'true_count'],
             index=range(4),
-            data=[[100.0, 10, 0], [400.0, 10, 0], [700.0, 10, 0], [1000.0, 10, 0]],
+            data=[[100.0, 10, 0], [215.0, 10, 0], [464.0, 10, 0], [1000.0, 10, 0]],
         )
     )
+    assert results == desired, f'{results.data=} {desired.data=}'


 @pytest.mark.usefixtures('closefigures')
```
```diff
@@ -150,28 +152,14 @@ def test_curve_fit_plot_works():
         data=pd.DataFrame(
             columns=['delay_ns', 'false_count', 'true_count'],
             index=range(4),
-            data=[[100.0, 6, 4], [400.0, 10, 0], [700.0, 10, 0], [1000.0, 10, 0]],
+            data=[[100.0, 6, 4], [215.0, 10, 0], [464.0, 10, 0], [1000.0, 10, 0]],
         )
     )

     good_fit.plot(include_fit=True)


-@pytest.mark.usefixtures('closefigures')
-def test_curve_fit_plot_warning():
-    bad_fit = cirq.experiments.T1DecayResult(
-        data=pd.DataFrame(
-            columns=['delay_ns', 'false_count', 'true_count'],
-            index=range(4),
-            data=[[100.0, 10, 0], [400.0, 10, 0], [700.0, 10, 0], [1000.0, 10, 0]],
-        )
-    )
-
-    with pytest.warns(RuntimeWarning, match='Optimal parameters could not be found for curve fit'):
-        bad_fit.plot(include_fit=True)
-
-
-@pytest.mark.parametrize('t1', [200, 500, 700])
+@pytest.mark.parametrize('t1', [200.0, 500.0, 700.0])
 def test_noise_model_continous(t1):
     class GradualDecay(cirq.NoiseModel):
         def __init__(self, t1: float):
```
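The hunk above deletes `test_curve_fit_plot_warning`, which asserted that plotting with `include_fit=True` warns when the curve fit cannot converge. For reference, a minimal cirq-free illustration of the `pytest.warns` pattern that test relied on (`plot_with_bad_fit` is a hypothetical stand-in; the warning text is copied from the deleted test):

```python
import warnings
import pytest

def plot_with_bad_fit() -> None:
    # Stand-in for a plot call whose curve fit fails to converge.
    warnings.warn('Optimal parameters could not be found for curve fit', RuntimeWarning)

def test_warns_on_bad_fit() -> None:
    # pytest.warns fails the test unless a matching warning is emitted inside the block.
    with pytest.warns(RuntimeWarning, match='Optimal parameters could not be found'):
        plot_with_bad_fit()
```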
```diff
@@ -196,10 +184,10 @@ def noisy_moment(self, moment, system_qubits):
     results = cirq.experiments.t1_decay(
         sampler=cirq.DensityMatrixSimulator(noise=GradualDecay(t1)),
         qubit=cirq.GridQubit(0, 0),
-        num_points=4,
+        num_points=10,
         repetitions=10,
-        min_delay=cirq.Duration(nanos=100),
-        max_delay=cirq.Duration(micros=1),
+        min_delay=cirq.Duration(nanos=1),
+        max_delay=cirq.Duration(micros=10),
     )

     assert np.isclose(results.constant, t1, 50)
```
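For context on the final assertion: a T1 experiment prepares |1⟩, waits a variable delay, and measures, and the excited-state population is expected to decay as exp(-t/T1), so fitting that curve should recover the `t1` fed into the noise model. A toy, cirq-free version of the fit under that assumed exponential form (the real experiment fits measured counts rather than exact populations):

```python
import numpy as np
from scipy import optimize

true_t1_ns = 500.0
delays_ns = np.geomspace(1.0, 10_000.0, 10)    # 1 ns .. 10 us, like the new test settings
populations = np.exp(-delays_ns / true_t1_ns)  # ideal excited-state survival probabilities

# Fit P(t) = exp(-t / T1) to the synthetic data and recover T1.
popt, _ = optimize.curve_fit(lambda t, t1: np.exp(-t / t1), delays_ns, populations, p0=[100.0])
print(popt[0])  # ~500.0
```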
**Collaborator:** use the original parameter to make it clear that this happens when no `min_delay` is supplied.

**Author:** I want to keep it as is in case the user specifies 0 min delay.
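One possible reading of this exchange, assuming the updated experiment spaces its delays geometrically between `min_delay` and `max_delay` (the delay-generation code is not shown in this diff): a zero minimum delay is a genuine edge case, because a geometric sequence cannot include zero.

```python
import numpy as np

# Geometric spacing works for any positive minimum delay...
print(np.geomspace(1, 10_000, 10))  # delays from 1 ns up to 10 us

# ...but not for zero, which would need special handling.
try:
    np.geomspace(0, 10_000, 10)
except ValueError as err:
    print(err)
```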