Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Real interferometer #132

Merged
merged 24 commits into from
May 18, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 16 additions & 14 deletions .github/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,20 +5,6 @@
[(#128)](https://github.com/XanaduAI/MrMustard/issues/128)

* States in Gaussian and Fock representation can now be concatenated.
[(#130)](https://github.com/XanaduAI/MrMustard/pull/130)

* Parameter passthrough allows to use custom parameters in the model, that is, objects accept correlated parameters. For example,
```python
from mrmustard.lab.gates import Sgate, BSgate

BS = BSgate(theta=np.pi/4, theta_trainable=True)[0,1]
S0 = Sgate(r=BS.theta)[0]
S1 = Sgate(r=-BS.theta)[1]

circ = S0 >> S1 >> BS
```
[(#131)](https://github.com/XanaduAI/MrMustard/pull/131)

```python
from mrmustard.lab.states import Gaussian, Fock
from mrmustard.lab.gates import Attenuator
Expand All @@ -35,6 +21,22 @@

mixed_state.dm()
```
[(#130)](https://github.com/XanaduAI/MrMustard/pull/130)

* Parameter passthrough allows the use of custom parameters in the model, that is, objects accept correlated parameters. For example,
```python
from mrmustard.lab.gates import Sgate, BSgate

BS = BSgate(theta=np.pi/4, theta_trainable=True)[0,1]
S0 = Sgate(r=BS.theta)[0]
S1 = Sgate(r=-BS.theta)[1]

circ = S0 >> S1 >> BS
```
[(#131)](https://github.com/XanaduAI/MrMustard/pull/131)

* Adds the new trainable gate `RealInterferometer`: an interferometer that doesn't mix the q and p quadratures.
[(#132)](https://github.com/XanaduAI/MrMustard/pull/132)

### Breaking changes

Expand Down
68 changes: 62 additions & 6 deletions mrmustard/lab/gates.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,12 @@
from mrmustard import settings
from mrmustard.lab.abstract import Transformation
from mrmustard.utils.parametrized import Parametrized
from mrmustard.utils import training
from mrmustard.physics import gaussian

from mrmustard.math import Math

math = Math()

__all__ = [
"Dgate",
"Sgate",
Expand All @@ -38,6 +41,7 @@
"CZgate",
"CXgate",
"Interferometer",
"RealInterferometer",
"Attenuator",
"Amplifier",
"AdditiveNoise",
Expand Down Expand Up @@ -429,7 +433,8 @@ class Interferometer(Parametrized, Transformation):
It corresponds to a Ggate with zero mean and a ``2N x 2N`` orthogonal symplectic matrix.

Args:
orthogonal (2d array): a valid orthogonal matrix. For N modes it must have shape `(2N,2N)`
orthogonal (2d array, optional): a valid orthogonal matrix. For N modes it must have shape `(2N,2N)`.
If set to `None` a random orthogonal matrix is used.
orthogonal_trainable (bool): whether orthogonal is a trainable variable
"""

Expand All @@ -440,7 +445,8 @@ def __init__(
orthogonal_trainable: bool = False,
):
if orthogonal is None:
orthogonal = training.new_orthogonal(num_modes=num_modes)
U = math.random_unitary(num_modes)
orthogonal = math.block([[math.real(U), -math.imag(U)], [math.imag(U), math.real(U)]])
super().__init__(
orthogonal=orthogonal,
orthogonal_trainable=orthogonal_trainable,
Expand All @@ -454,9 +460,59 @@ def X_matrix(self):
return self.orthogonal

def _validate_modes(self, modes):
    """Check that the given modes match the size of the orthogonal matrix.

    For N modes the orthogonal matrix has shape ``(2N, 2N)``, so the number of
    modes must equal ``shape[-1] // 2``.

    Args:
        modes (list[int]): the modes this transformation acts on

    Raises:
        ValueError: if ``len(modes)`` does not equal N
    """
    # The diff left both the old (shape[1]) and new (shape[-1]) condition lines
    # stacked, which is invalid Python; keep the updated shape[-1] form, which
    # matches the index used in the error message below.
    if len(modes) != self.orthogonal.shape[-1] // 2:
        raise ValueError(
            f"Invalid number of modes: {len(modes)} (should be {self.orthogonal.shape[-1] // 2})"
        )

@property
def trainable_parameters(self) -> Dict[str, List[Trainable]]:
    """Return the trainable parameters of this gate, grouped by manifold.

    Only the orthogonal matrix can be trainable here; the symplectic and
    euclidean groups are always empty for this transformation.
    """
    orthogonal_params = []
    if self._orthogonal_trainable:
        orthogonal_params = [self.orthogonal]
    return {"symplectic": [], "orthogonal": orthogonal_params, "euclidean": []}


class RealInterferometer(Parametrized, Transformation):
r"""N-mode interferometer with a real unitary matrix (or block-diagonal orthogonal matrix).
Does not mix q's and p's.

Args:
orthogonal (2d array, optional): a valid orthogonal matrix. For N modes it must have shape `(N,N)`.
If set to `None` a random orthogonal matrix is used.
orthogonal_trainable (bool): whether orthogonal is a trainable variable
"""

def __init__(
    self,
    num_modes: int,
    orthogonal: Optional[Tensor] = None,
    orthogonal_trainable: bool = False,
):
    """Initialize an N-mode real interferometer.

    Args:
        num_modes: number of modes the interferometer acts on
        orthogonal: a valid (N, N) orthogonal matrix; when ``None`` a random
            orthogonal matrix is drawn
        orthogonal_trainable: whether the orthogonal matrix is a trainable variable
    """
    # Draw a random orthogonal matrix when the caller does not supply one.
    orthogonal = math.random_orthogonal(num_modes) if orthogonal is None else orthogonal
    super().__init__(
        orthogonal=orthogonal,
        orthogonal_trainable=orthogonal_trainable,
        orthogonal_bounds=(None, None),
        modes=list(range(num_modes)),
    )
    # Mark this transformation as Gaussian.
    self.is_gaussian = True

@property
def X_matrix(self):
    """The (2N, 2N) X matrix: block-diagonal ``[[O, 0], [0, O]]``.

    The zero off-diagonal blocks mean the q and p quadratures do not mix.
    """
    zeros = math.zeros_like(self.orthogonal)
    blocks = [[self.orthogonal, zeros], [zeros, self.orthogonal]]
    return math.block(blocks)

def _validate_modes(self, modes):
    """Check that the given modes match the size of the orthogonal matrix.

    For N modes the orthogonal matrix of a ``RealInterferometer`` has shape
    ``(N, N)``, so the number of modes must equal ``shape[-1]``.

    Args:
        modes (list[int]): the modes this transformation acts on

    Raises:
        ValueError: if ``len(modes)`` does not equal N
    """
    # The diff left both the removed (shape[1] // 2) and added (shape[-1])
    # f-string lines side by side; implicit concatenation would double the
    # message. Keep only the new one, matching the (N, N) matrix size.
    if len(modes) != self.orthogonal.shape[-1]:
        raise ValueError(
            f"Invalid number of modes: {len(modes)} (should be {self.orthogonal.shape[-1]})"
        )

@property
Expand Down Expand Up @@ -487,7 +543,7 @@ def __init__(
symplectic_trainable: bool = False,
):
if symplectic is None:
symplectic = training.new_symplectic(num_modes=num_modes)
symplectic = math.random_symplectic(num_modes)
super().__init__(
symplectic=symplectic,
symplectic_trainable=symplectic_trainable,
Expand Down
3 changes: 1 addition & 2 deletions mrmustard/lab/states.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@
from mrmustard.lab.abstract import State
from mrmustard.physics import gaussian, fock
from mrmustard.utils.parametrized import Parametrized
from mrmustard.utils import training
from mrmustard.math import Math

math = Math()
Expand Down Expand Up @@ -421,7 +420,7 @@ def __init__(
normalize: bool = False,
):
if symplectic is None:
symplectic = training.new_symplectic(num_modes=num_modes)
symplectic = math.random_symplectic(num_modes=num_modes)
if eigenvalues is None:
eigenvalues = gaussian.math.ones(num_modes) * settings.HBAR / 2
if math.any(math.atleast_1d(eigenvalues) < settings.HBAR / 2):
Expand Down
Loading