Skip to content

Commit

Permalink
add constructors
Browse files Browse the repository at this point in the history
  • Loading branch information
jpmoutinho committed Feb 9, 2024
1 parent c0822a5 commit a4c4f6f
Show file tree
Hide file tree
Showing 9 changed files with 1,468 additions and 0 deletions.
24 changes: 24 additions & 0 deletions qadence_libs/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
from __future__ import annotations

from importlib import import_module

from .constructors import *

"""Fetch the functions defined in the __all__ of each sub-module.
Import to the qadence name space. Make sure each added submodule has the respective definition:
- `__all__ = ["function0", "function1", ...]`
Furthermore, add the submodule to the list below to automatically build
the __all__ of the qadence namespace. Make sure to keep alphabetical ordering.
"""

list_of_submodules = [
".constructors",
]

__all__ = []
for submodule in list_of_submodules:
__all_submodule__ = getattr(import_module(submodule, package="qadence"), "__all__")
__all__ += __all_submodule__
35 changes: 35 additions & 0 deletions qadence_libs/constructors/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# flake8: noqa

from .feature_maps import (
feature_map,
chebyshev_feature_map,
fourier_feature_map,
tower_feature_map,
exp_fourier_feature_map,
)

from .hea import hea

from .iia import identity_initialized_ansatz

from .rydberg_hea import rydberg_hea, rydberg_hea_layer
from .rydberg_feature_maps import rydberg_feature_map, analog_feature_map, rydberg_tower_feature_map

from .qft import qft

# Public symbols re-exported on star-import of this subpackage; the parent
# package's __init__ aggregates this list into its own __all__.
__all__ = [
    "feature_map",
    "chebyshev_feature_map",
    "fourier_feature_map",
    "tower_feature_map",
    "exp_fourier_feature_map",
    "hea",
    "identity_initialized_ansatz",
    "qft",
    "rydberg_hea",
    "rydberg_hea_layer",
    "rydberg_feature_map",
    "analog_feature_map",
    "rydberg_tower_feature_map",
]
317 changes: 317 additions & 0 deletions qadence_libs/constructors/feature_maps.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,317 @@
from __future__ import annotations

import warnings
from collections.abc import Callable
from math import isclose
from typing import Union

from qadence.blocks import AbstractBlock, KronBlock, chain, kron, tag
from qadence.logger import get_logger
from qadence.operations import PHASE, RX, RY, RZ, H
from qadence.parameters import FeatureParameter, Parameter, VariationalParameter
from qadence.types import PI, BasisSet, ReuploadScaling, TParameter
from sympy import Basic, acos

logger = get_logger(__name__)

ROTATIONS = [RX, RY, RZ, PHASE]
RotationTypes = type[Union[RX, RY, RZ, PHASE]]


def _set_range(fm_type: BasisSet | Callable | str) -> tuple[float, float]:
    """Return the natural parameter domain for a given feature-map basis.

    Chebyshev polynomials live on (-1, 1); Fourier encoding on (0, 2*pi);
    anything else defaults to the unit interval.
    """
    if fm_type == BasisSet.CHEBYSHEV:
        return (-1.0, 1.0)
    if fm_type == BasisSet.FOURIER:
        return (0.0, 2 * PI)
    return (0.0, 1.0)


# Maps each ReuploadScaling strategy to a function f(i) giving the scaling
# factor applied to the feature parameter on the i-th qubit of the support.
RS_FUNC_DICT = {
    ReuploadScaling.CONSTANT: lambda i: 1,
    ReuploadScaling.TOWER: lambda i: float(i + 1),
    ReuploadScaling.EXP: lambda i: float(2**i),
}


def backwards_compatibility(
    fm_type: BasisSet | Callable | str,
    reupload_scaling: ReuploadScaling | Callable | str,
) -> tuple:
    """Translate deprecated string arguments into the corresponding enums.

    Accepts the legacy strings 'fourier', 'chebyshev' and 'tower' for
    `fm_type`, emits a deprecation warning, and returns the equivalent
    (fm_type, reupload_scaling) pair expressed with BasisSet/ReuploadScaling.
    Any other inputs are passed through unchanged.
    """
    if fm_type in ("fourier", "chebyshev", "tower"):
        logger.warning(
            "Selecting `fm_type` as 'fourier', 'chebyshev' or 'tower' is deprecated. "
            "Please use the respective enumerations: 'fm_type = BasisSet.FOURIER', "
            "'fm_type = BasisSet.CHEBYSHEV' or 'reupload_scaling = ReuploadScaling.TOWER'."
        )
        if fm_type == "tower":
            # Legacy 'tower' meant Chebyshev basis with tower reuploading.
            fm_type = BasisSet.CHEBYSHEV
            reupload_scaling = ReuploadScaling.TOWER
        elif fm_type == "fourier":
            fm_type = BasisSet.FOURIER
        else:
            fm_type = BasisSet.CHEBYSHEV

    return fm_type, reupload_scaling


def fm_parameter_scaling(
    fm_type: BasisSet | Callable | str,
    param: Parameter | str = "phi",
    feature_range: tuple[float, float] | None = None,
    target_range: tuple[float, float] | None = None,
) -> Parameter | Basic:
    """Build the feature parameter, affinely rescaled onto the target range.

    A string `param` is promoted to a FeatureParameter; an existing Parameter
    is marked non-trainable in place. When a range is omitted it defaults to
    the natural domain of the chosen basis (see `_set_range`).
    """
    if isinstance(param, Parameter):
        fparam = param
        # Feature parameters are never trainable, regardless of how the
        # caller constructed them.
        fparam.trainable = False
    else:
        fparam = FeatureParameter(param)

    # Default both ranges to the basis' natural domain.
    if feature_range is None:
        feature_range = _set_range(fm_type)
    if target_range is None:
        target_range = _set_range(fm_type)

    f_min, f_max = min(feature_range), max(feature_range)
    t_min, t_max = min(target_range), max(target_range)

    # Affine map sending [f_min, f_max] onto [t_min, t_max].
    scaling = (t_max - t_min) / (f_max - f_min)
    shift = t_min - f_min * scaling

    if isclose(scaling, 1.0):
        # Omit a unit factor so it does not clutter visualizations.
        return fparam + shift
    return scaling * fparam + shift


def fm_parameter_func(fm_type: BasisSet | Callable | str) -> Callable:
    """Return the sympy transform wrapped around the feature parameter.

    Fourier encoding leaves the parameter untouched, Chebyshev applies acos,
    and a callable `fm_type` is used as-is; anything else is rejected.
    """

    def _identity(x: TParameter) -> TParameter:
        return x

    if fm_type == BasisSet.FOURIER:
        return _identity
    if fm_type == BasisSet.CHEBYSHEV:
        return acos
    if callable(fm_type):
        return fm_type
    raise NotImplementedError(
        f"Feature map type {fm_type} not implemented. Choose an item from the BasisSet "
        f"enum: {BasisSet.list()}, or a custom defined sympy function to wrap "
        "the given feature parameter with."
    )


def fm_reupload_scaling_fn(
    reupload_scaling: ReuploadScaling | Callable | str = ReuploadScaling.CONSTANT,
) -> tuple[Callable, str]:
    """Resolve the reupload scaling into a per-qubit function and a display tag.

    A callable is passed through and tagged "Custom"; otherwise the value is
    looked up in RS_FUNC_DICT and tagged with the enum value (or the raw
    string). Unknown values raise NotImplementedError.
    """
    if callable(reupload_scaling):
        return reupload_scaling, "Custom"

    rs_func = RS_FUNC_DICT.get(reupload_scaling, None)  # type: ignore [call-overload]
    if rs_func is None:
        raise NotImplementedError(
            f"Reupload scaling {reupload_scaling} not implemented; choose an item from "
            f"the ReuploadScaling enum: {[rs.name for rs in ReuploadScaling]}, or your own "
            "python function with a single int arg as input and int or float output."
        )

    if isinstance(reupload_scaling, ReuploadScaling):
        return rs_func, reupload_scaling.value
    return rs_func, reupload_scaling


def feature_map(
    n_qubits: int,
    support: tuple[int, ...] | None = None,
    param: Parameter | str = "phi",
    op: RotationTypes = RX,
    fm_type: BasisSet | Callable | str = BasisSet.FOURIER,
    reupload_scaling: ReuploadScaling | Callable | str = ReuploadScaling.CONSTANT,
    feature_range: tuple[float, float] | None = None,
    target_range: tuple[float, float] | None = None,
    multiplier: Parameter | TParameter | None = None,
    param_prefix: str | None = None,
) -> KronBlock:
    """Construct a feature map of a given type.

    Arguments:
        n_qubits: Number of qubits the feature map covers. Results in `support=range(n_qubits)`.
        support: Puts one feature-encoding rotation gate on every qubit in `support`. n_qubits in
            this case specifies the total overall qubits of the circuit, which may be wider than the
            support itself, but not narrower.
        param: Parameter of the feature map; you can pass a string or Parameter;
            it will be set as non-trainable (FeatureParameter) regardless.
        op: Rotation operation of the feature map; choose from RX, RY, RZ or PHASE.
        fm_type: Basis set for data encoding; choose from `BasisSet.FOURIER` for Fourier
            encoding, or `BasisSet.CHEBYSHEV` for Chebyshev polynomials of the first kind.
        reupload_scaling: how the feature map scales the data that is re-uploaded for each qubit.
            choose from `ReuploadScaling` enumeration or provide your own function with a single
            int as input and int or float as output.
        feature_range: range of data that the input data provided comes from. Used to map input data
            to the correct domain of the feature-encoding function.
        target_range: range of data the data encoder assumes as the natural range. For example,
            in Chebyshev polynomials it is (-1, 1), while for Fourier it may be chosen as (0, 2*PI).
            Used to map data to the correct domain of the feature-encoding function.
        multiplier: overall multiplier; this is useful for reuploading the feature map serially with
            different scalings; can be a number or parameter/expression.
        param_prefix: string prefix to create trainable parameters multiplying the feature parameter
            inside the feature-encoding function. Note that currently this does not take into
            account the domain of the feature-encoding function.

    Returns:
        A tagged KronBlock with one rotation per qubit in `support`.

    Example:
    ```python exec="on" source="material-block" result="json"
    from qadence import feature_map, BasisSet, ReuploadScaling

    fm = feature_map(3, fm_type=BasisSet.FOURIER)
    print(f"{fm = }")

    fm = feature_map(3, fm_type=BasisSet.CHEBYSHEV)
    print(f"{fm = }")

    fm = feature_map(3, fm_type=BasisSet.FOURIER, reupload_scaling = ReuploadScaling.TOWER)
    print(f"{fm = }")
    ```
    """

    # Process input
    if support is None:
        support = tuple(range(n_qubits))
    elif len(support) != n_qubits:
        raise ValueError("Wrong qubit support supplied")

    if op not in ROTATIONS:
        raise ValueError(
            f"Operation {op} not supported. "
            f"Please provide one from {[rot.__name__ for rot in ROTATIONS]}."
        )

    # Backwards compatibility
    fm_type, reupload_scaling = backwards_compatibility(fm_type, reupload_scaling)

    # Rescale the feature parameter onto the target domain of the basis.
    scaled_fparam = fm_parameter_scaling(
        fm_type, param, feature_range=feature_range, target_range=target_range
    )

    # Basis-specific sympy transform (identity for Fourier, acos for Chebyshev).
    transform_func = fm_parameter_func(fm_type)

    basis_tag = fm_type.value if isinstance(fm_type, BasisSet) else str(fm_type)
    rs_func, rs_tag = fm_reupload_scaling_fn(reupload_scaling)

    # Set overall multiplier
    multiplier = 1 if multiplier is None else Parameter(multiplier)

    # Build feature map: one rotation per support qubit. When `param_prefix`
    # is None the same scaled parameter is reused on every qubit; otherwise
    # each qubit gets its own trainable multiplier (prefix_0, prefix_1, ...).
    op_list = []
    fparam = scaled_fparam
    for i, qubit in enumerate(support):
        if param_prefix is not None:
            train_param = VariationalParameter(param_prefix + f"_{i}")
            fparam = train_param * scaled_fparam
        op_list.append(op(qubit, multiplier * rs_func(i) * transform_func(fparam)))
    fm = kron(*op_list)

    fm.tag = rs_tag + " " + basis_tag + " FM"

    return fm


def fourier_feature_map(
    n_qubits: int,
    support: tuple[int, ...] | None = None,
    param: str = "phi",
    op: RotationTypes = RX,
) -> AbstractBlock:
    """Construct a Fourier feature map.

    Deprecated: use `feature_map` with `fm_type=BasisSet.FOURIER` instead.

    Args:
        n_qubits: number of qubits across which the FM is created
        support: qubit support; defaults to `range(n_qubits)`
        param: The base name for the feature `Parameter`
        op: rotation operation applied on each qubit
    """
    warnings.warn(
        "Function 'fourier_feature_map' is deprecated. Please use 'feature_map' directly.",
        FutureWarning,
    )
    # Fix vs. original: `support` annotated as Optional instead of the
    # invalid implicit-Optional `tuple[int, ...] = None`.
    return feature_map(n_qubits, support=support, param=param, op=op, fm_type=BasisSet.FOURIER)


def chebyshev_feature_map(
    n_qubits: int,
    support: tuple[int, ...] | None = None,
    param: str = "phi",
    op: RotationTypes = RX,
) -> AbstractBlock:
    """Construct a Chebyshev feature map.

    Deprecated: use `feature_map` with `fm_type=BasisSet.CHEBYSHEV` instead.

    Args:
        n_qubits: number of qubits across which the FM is created
        support: The qubit support; defaults to `range(n_qubits)`
        param: The base name for the feature `Parameter`
        op: rotation operation applied on each qubit
    """
    warnings.warn(
        "Function 'chebyshev_feature_map' is deprecated. Please use 'feature_map' directly.",
        FutureWarning,
    )
    # Fix vs. original: `support` annotated as Optional instead of the
    # invalid implicit-Optional `tuple[int, ...] = None`.
    return feature_map(n_qubits, support=support, param=param, op=op, fm_type=BasisSet.CHEBYSHEV)


def tower_feature_map(
    n_qubits: int,
    support: tuple[int, ...] | None = None,
    param: str = "phi",
    op: RotationTypes = RX,
) -> AbstractBlock:
    """Construct a Chebyshev tower feature map.

    Deprecated: use `feature_map` with `fm_type=BasisSet.CHEBYSHEV` and
    `reupload_scaling=ReuploadScaling.TOWER` instead.

    Args:
        n_qubits: number of qubits across which the FM is created
        support: qubit support; defaults to `range(n_qubits)`
        param: The base name for the feature `Parameter`
        op: rotation operation applied on each qubit
    """
    warnings.warn(
        "Function 'tower_feature_map' is deprecated. Please use feature_map directly.",
        FutureWarning,
    )
    # Fix vs. original: `support` annotated as Optional instead of the
    # invalid implicit-Optional `tuple[int, ...] = None`.
    return feature_map(
        n_qubits,
        support=support,
        param=param,
        op=op,
        fm_type=BasisSet.CHEBYSHEV,
        reupload_scaling=ReuploadScaling.TOWER,
    )


def exp_fourier_feature_map(
    n_qubits: int,
    support: tuple[int, ...] | None = None,
    param: str = "x",
    feature_range: tuple[float, float] | None = None,
) -> AbstractBlock:
    """Exponential Fourier feature map: a Hadamard layer followed by RZ
    rotations with exponentially-scaled reuploading.

    Args:
        n_qubits: number of qubits in the feature
        support: qubit support; defaults to `range(n_qubits)`
        param: name of feature `Parameter`
        feature_range: min and max value of the feature, as floats in a Tuple;
            defaults to (0, 2**n_qubits)
    """
    # Fix vs. original: `support` and `feature_range` annotated as Optional
    # instead of invalid implicit-Optional defaults.
    if feature_range is None:
        feature_range = (0.0, 2.0**n_qubits)

    if support is None:
        support = tuple(range(n_qubits))

    # Hadamard layer to create the uniform superposition, then the
    # exponentially-reuploaded Fourier encoding.
    hlayer = kron(H(qubit) for qubit in support)
    rlayer = feature_map(
        n_qubits,
        support=support,
        param=param,
        op=RZ,
        fm_type=BasisSet.FOURIER,
        reupload_scaling=ReuploadScaling.EXP,
        feature_range=feature_range,
        target_range=(0.0, 2 * PI),
    )
    # Drop the inner tag so only the combined block carries a label.
    rlayer.tag = None
    return tag(chain(hlayer, rlayer), f"ExpFourierFM({param})")
Loading

0 comments on commit a4c4f6f

Please sign in to comment.