diff --git a/src/braket/annealing/problem.py b/src/braket/annealing/problem.py index 4959df52e..d8b40c372 100644 --- a/src/braket/annealing/problem.py +++ b/src/braket/annealing/problem.py @@ -37,16 +37,16 @@ class Problem: def __init__( self, problem_type: ProblemType, - linear: Dict[int, float] = None, - quadratic: Dict[Tuple[int, int], float] = None, + linear: Dict[int, float] | None = None, + quadratic: Dict[Tuple[int, int], float] | None = None, ): """ Args: problem_type (ProblemType): The type of annealing problem - linear (Dict[int, float]): The linear terms of this problem, + linear (Dict[int, float] | None): The linear terms of this problem, as a map of variable to coefficient - quadratic (Dict[Tuple[int, int], float]): The quadratic terms of this problem, + quadratic (Dict[Tuple[int, int], float] | None): The quadratic terms of this problem, as a map of variables to coefficient Examples: diff --git a/src/braket/aws/aws_quantum_job.py b/src/braket/aws/aws_quantum_job.py index 76ff0648c..8ae4bc129 100644 --- a/src/braket/aws/aws_quantum_job.py +++ b/src/braket/aws/aws_quantum_job.py @@ -64,22 +64,22 @@ def create( cls, device: str, source_module: str, - entry_point: str = None, - image_uri: str = None, - job_name: str = None, - code_location: str = None, - role_arn: str = None, + entry_point: str | None = None, + image_uri: str | None = None, + job_name: str | None = None, + code_location: str | None = None, + role_arn: str | None = None, wait_until_complete: bool = False, - hyperparameters: dict[str, Any] = None, - input_data: str | dict | S3DataSourceConfig = None, - instance_config: InstanceConfig = None, - distribution: str = None, - stopping_condition: StoppingCondition = None, - output_data_config: OutputDataConfig = None, - copy_checkpoints_from_job: str = None, - checkpoint_config: CheckpointConfig = None, - aws_session: AwsSession = None, - tags: dict[str, str] = None, + hyperparameters: dict[str, Any] | None = None, + input_data: str | dict | S3DataSourceConfig | None = None, + instance_config: InstanceConfig | None = None, + distribution: str | None = None, + stopping_condition: StoppingCondition | None = None, + output_data_config: OutputDataConfig | None = None, + copy_checkpoints_from_job: str | None = None, + checkpoint_config: CheckpointConfig | None = None, + aws_session: AwsSession | None = None, + tags: dict[str, str] | None = None, logger: Logger = getLogger(__name__), ) -> AwsQuantumJob: """Creates a hybrid job by invoking the Braket CreateJob API. @@ -96,37 +96,38 @@ def create( tarred and uploaded. If `source_module` is an S3 URI, it must point to a tar.gz file. Otherwise, source_module may be a file or directory. - entry_point (str): A str that specifies the entry point of the hybrid job, relative to - the source module. The entry point must be in the format + entry_point (str | None): A str that specifies the entry point of the hybrid job, + relative to the source module. The entry point must be in the format `importable.module` or `importable.module:callable`. For example, `source_module.submodule:start_here` indicates the `start_here` function contained in `source_module.submodule`. If source_module is an S3 URI, entry point must be given. Default: source_module's name - image_uri (str): A str that specifies the ECR image to use for executing the hybrid job. - `image_uris.retrieve_image()` function may be used for retrieving the ECR image URIs - for the containers supported by Braket. Default = ``. 
+ image_uri (str | None): A str that specifies the ECR image to use for executing the + hybrid job. `image_uris.retrieve_image()` function may be used for retrieving the + ECR image URIs for the containers supported by Braket. + Default = ``. - job_name (str): A str that specifies the name with which the hybrid job is created. - Allowed pattern for hybrid job name: `^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,50}$` + job_name (str | None): A str that specifies the name with which the hybrid job is + created. Allowed pattern for hybrid job name: `^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,50}$` Default: f'{image_uri_type}-{timestamp}'. - code_location (str): The S3 prefix URI where custom code will be uploaded. + code_location (str | None): The S3 prefix URI where custom code will be uploaded. Default: f's3://{default_bucket_name}/jobs/{job_name}/script'. - role_arn (str): A str providing the IAM role ARN used to execute the + role_arn (str | None): A str providing the IAM role ARN used to execute the script. Default: IAM role returned by AwsSession's `get_default_jobs_role()`. wait_until_complete (bool): `True` if we should wait until the hybrid job completes. This would tail the hybrid job logs as it waits. Otherwise `False`. Default: `False`. - hyperparameters (dict[str, Any]): Hyperparameters accessible to the hybrid job. + hyperparameters (dict[str, Any] | None): Hyperparameters accessible to the hybrid job. The hyperparameters are made accessible as a dict[str, str] to the hybrid job. For convenience, this accepts other types for keys and values, but `str()` is called to convert them before being passed on. Default: None. - input_data (str | dict | S3DataSourceConfig): Information about the training + input_data (str | dict | S3DataSourceConfig | None): Information about the training data. Dictionary maps channel names to local paths or S3 URIs. Contents found at any local paths will be uploaded to S3 at f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}. If a local @@ -134,39 +135,40 @@ def create( channel name "input". Default: {}. - instance_config (InstanceConfig): Configuration of the instance(s) for running the - classical code for the hybrid job. Default: + instance_config (InstanceConfig | None): Configuration of the instance(s) for running + the classical code for the hybrid job. Default: `InstanceConfig(instanceType='ml.m5.large', instanceCount=1, volumeSizeInGB=30)`. - distribution (str): A str that specifies how the hybrid job should be distributed. - If set to "data_parallel", the hyperparameters for the hybrid job will be set - to use data parallelism features for PyTorch or TensorFlow. Default: None. + distribution (str | None): A str that specifies how the hybrid job should be + distributed. If set to "data_parallel", the hyperparameters for the hybrid job will + be set to use data parallelism features for PyTorch or TensorFlow. Default: None. - stopping_condition (StoppingCondition): The maximum length of time, in seconds, + stopping_condition (StoppingCondition | None): The maximum length of time, in seconds, and the maximum number of quantum tasks that a hybrid job can run before being forcefully stopped. Default: StoppingCondition(maxRuntimeInSeconds=5 * 24 * 60 * 60). - output_data_config (OutputDataConfig): Specifies the location for the output of the - hybrid job. + output_data_config (OutputDataConfig | None): Specifies the location for the output of + the hybrid job. Default: OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data', kmsKeyId=None). 
- copy_checkpoints_from_job (str): A str that specifies the hybrid job ARN whose + copy_checkpoints_from_job (str | None): A str that specifies the hybrid job ARN whose checkpoint you want to use in the current hybrid job. Specifying this value will copy over the checkpoint data from `use_checkpoints_from_job`'s checkpoint_config s3Uri to the current hybrid job's checkpoint_config s3Uri, making it available at checkpoint_config.localPath during the hybrid job execution. Default: None - checkpoint_config (CheckpointConfig): Configuration that specifies the location where - checkpoint data is stored. + checkpoint_config (CheckpointConfig | None): Configuration that specifies the location + where checkpoint data is stored. Default: CheckpointConfig(localPath='/opt/jobs/checkpoints', s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints'). - aws_session (AwsSession): AwsSession for connecting to AWS Services. + aws_session (AwsSession | None): AwsSession for connecting to AWS Services. Default: AwsSession() - tags (dict[str, str]): Dict specifying the key-value pairs for tagging this hybrid job. + tags (dict[str, str] | None): Dict specifying the key-value pairs for tagging this + hybrid job. Default: {}. logger (Logger): Logger object with which to write logs, such as quantum task statuses @@ -210,11 +212,11 @@ def create( return job - def __init__(self, arn: str, aws_session: AwsSession = None): + def __init__(self, arn: str, aws_session: AwsSession | None = None): """ Args: arn (str): The ARN of the hybrid job. - aws_session (AwsSession): The `AwsSession` for connecting to AWS services. + aws_session (AwsSession | None): The `AwsSession` for connecting to AWS services. Default is `None`, in which case an `AwsSession` object will be created with the region of the hybrid job. """ @@ -486,7 +488,7 @@ def _read_and_deserialize_results(temp_dir: str, job_name: str) -> dict[str, Any def download_result( self, - extract_to: str = None, + extract_to: str | None = None, poll_timeout_seconds: float = QuantumJob.DEFAULT_RESULTS_POLL_TIMEOUT, poll_interval_seconds: float = QuantumJob.DEFAULT_RESULTS_POLL_INTERVAL, ) -> None: @@ -495,7 +497,7 @@ def download_result( the results are extracted to the current directory. Args: - extract_to (str): The directory to which the results are extracted. The results + extract_to (str | None): The directory to which the results are extracted. The results are extracted to a folder titled with the hybrid job name within this directory. Default= `Current working directory`. poll_timeout_seconds (float): The polling timeout, in seconds, for `download_result()`. 
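The hunks above replace implicit-Optional defaults (`linear: Dict[int, float] = None`, `entry_point: str = None`, and so on) with explicit `T | None = None` annotations, which is what PEP 484 requires: a `None` default does not by itself make a parameter Optional, and strict type checkers flag the old spelling. The sketch below is a hypothetical, simplified stand-in (not the Braket API itself) that mirrors the pattern and the `None`-to-default behavior the docstrings describe; the helper name, ARN, and payload keys are illustrative only.

```python
from __future__ import annotations  # lets `T | None` annotations evaluate lazily on Python 3.9

from typing import Any


def create_job_request(
    device: str,
    source_module: str,
    hyperparameters: dict[str, Any] | None = None,  # explicitly optional, as in the diff above
    tags: dict[str, str] | None = None,
) -> dict[str, Any]:
    """Hypothetical helper mirroring the create() docstrings: None falls back to {} defaults."""
    return {
        "device": device,
        "sourceModule": source_module,
        # The docstring notes that str() is called on hyperparameter keys and values.
        "hyperparameters": {str(k): str(v) for k, v in (hyperparameters or {}).items()},
        "tags": tags or {},
    }


print(create_job_request("arn:aws:braket:::device/example", "my_algorithm/"))  # placeholder args
```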
diff --git a/src/braket/aws/aws_quantum_task.py b/src/braket/aws/aws_quantum_task.py
index 346816e71..fa58d911d 100644
--- a/src/braket/aws/aws_quantum_task.py
+++ b/src/braket/aws/aws_quantum_task.py
@@ -100,11 +100,11 @@ def create(
         ],
         s3_destination_folder: AwsSession.S3DestinationFolder,
         shots: int,
-        device_parameters: dict[str, Any] = None,
+        device_parameters: dict[str, Any] | None = None,
         disable_qubit_rewiring: bool = False,
-        tags: dict[str, str] = None,
-        inputs: dict[str, float] = None,
-        gate_definitions: Optional[dict[tuple[Gate, QubitSet], PulseSequence]] = None,
+        tags: dict[str, str] | None = None,
+        inputs: dict[str, float] | None = None,
+        gate_definitions: dict[tuple[Gate, QubitSet], PulseSequence] | None = None,
         *args,
         **kwargs,
     ) -> AwsQuantumTask:
@@ -129,7 +129,7 @@ def create(
            `shots=0` is only available on simulators and means that the simulator
            will compute the exact results based on the quantum task specification.

-            device_parameters (dict[str, Any]): Additional parameters to send to the device.
+            device_parameters (dict[str, Any] | None): Additional parameters to send to the device.

            disable_qubit_rewiring (bool): Whether to run the circuit with the exact qubits chosen,
                without any rewiring downstream, if this is supported by the device.
@@ -137,15 +137,15 @@
            If ``True``, no qubit rewiring is allowed; if ``False``, qubit rewiring is
            allowed. Default: False

-            tags (dict[str, str]): Tags, which are Key-Value pairs to add to this quantum task.
-                An example would be:
+            tags (dict[str, str] | None): Tags, which are Key-Value pairs to add to this quantum
+                task. An example would be:
                `{"state": "washington"}`

-            inputs (dict[str, float]): Inputs to be passed along with the
+            inputs (dict[str, float] | None): Inputs to be passed along with the
                IR. If the IR supports inputs, the inputs will be updated with this value.
                Default: {}.

-            gate_definitions (Optional[dict[tuple[Gate, QubitSet], PulseSequence]]):
+            gate_definitions (dict[tuple[Gate, QubitSet], PulseSequence] | None):
                A `Dict` for user defined gate calibration. The calibration is defined for
                for a particular `Gate` on a particular `QubitSet` and is represented by a
                `PulseSequence`.
@@ -203,7 +203,7 @@ def create(
     def __init__(
         self,
         arn: str,
-        aws_session: AwsSession = None,
+        aws_session: AwsSession | None = None,
         poll_timeout_seconds: float = DEFAULT_RESULTS_POLL_TIMEOUT,
         poll_interval_seconds: float = DEFAULT_RESULTS_POLL_INTERVAL,
         logger: Logger = getLogger(__name__),
@@ -211,7 +211,7 @@ def __init__(
        """
        Args:
            arn (str): The ARN of the quantum task.
-            aws_session (AwsSession): The `AwsSession` for connecting to AWS services.
+            aws_session (AwsSession | None): The `AwsSession` for connecting to AWS services.
                Default is `None`, in which case an `AwsSession` object will be created with the
                region of the quantum task.
            poll_timeout_seconds (float): The polling timeout for `result()`. Default: 5 days.
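Both spellings of an optional parameter appear in this changeset: most signatures switch to `T | None`, while a few (for example `serialization_properties` and `target_unitary` in the circuit modules below) use `typing.Optional[T]`. The two are interchangeable, as the short check below shows; evaluating a PEP 604 union outside an annotation needs Python 3.10+, whereas using it inside annotations on Python 3.9 relies on `from __future__ import annotations`, which these modules are assumed to import already given their use of built-in generics such as `dict[str, Any]`.

```python
from __future__ import annotations  # needed on Python 3.9 for `T | None` inside annotations

from typing import Optional, Union


def old_style(limit: Optional[int] = None) -> None:
    """Explicit Optional, kept by some hunks."""


def new_style(limit: int | None = None) -> None:
    """PEP 604 union, adopted by most hunks."""


# Evaluating the union outside an annotation requires Python 3.10+:
assert (int | None) == Optional[int] == Union[int, None]
print(int | None)  # prints: int | None
```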
diff --git a/src/braket/aws/aws_quantum_task_batch.py b/src/braket/aws/aws_quantum_task_batch.py index d29533a95..adf15bda3 100644 --- a/src/braket/aws/aws_quantum_task_batch.py +++ b/src/braket/aws/aws_quantum_task_batch.py @@ -60,7 +60,7 @@ def __init__( max_workers: int = MAX_CONNECTIONS_DEFAULT, poll_timeout_seconds: float = AwsQuantumTask.DEFAULT_RESULTS_POLL_TIMEOUT, poll_interval_seconds: float = AwsQuantumTask.DEFAULT_RESULTS_POLL_INTERVAL, - inputs: Union[dict[str, float], list[dict[str, float]]] = None, + inputs: Union[dict[str, float], list[dict[str, float]]] | None = None, *aws_quantum_task_args, **aws_quantum_task_kwargs, ): @@ -88,7 +88,7 @@ def __init__( in seconds. Default: 5 days. poll_interval_seconds (float): The polling interval for results in seconds. Default: 1 second. - inputs (Union[dict[str, float], list[dict[str, float]]]): Inputs to be passed + inputs (Union[dict[str, float], list[dict[str, float]]] | None): Inputs to be passed along with the IR. If the IR supports inputs, the inputs will be updated with this value. Default: {}. """ diff --git a/src/braket/aws/aws_session.py b/src/braket/aws/aws_session.py index eabe06dc8..9523f0573 100644 --- a/src/braket/aws/aws_session.py +++ b/src/braket/aws/aws_session.py @@ -40,17 +40,17 @@ class AwsSession(object): def __init__( self, - boto_session: boto3.Session = None, - braket_client: client = None, - config: Config = None, - default_bucket: str = None, + boto_session: boto3.Session | None = None, + braket_client: client | None = None, + config: Config | None = None, + default_bucket: str | None = None, ): """ Args: - boto_session (Session): A boto3 session object. - braket_client (client): A boto3 Braket client. - config (Config): A botocore Config object. - default_bucket (str): The name of the default bucket of the AWS Session. + boto_session (Session | None): A boto3 session object. + braket_client (client | None): A boto3 Braket client. + config (Config | None): A botocore Config object. + default_bucket (str | None): The name of the default bucket of the AWS Session. """ if ( boto_session @@ -716,7 +716,7 @@ def describe_log_streams( self, log_group: str, log_stream_prefix: str, - limit: int = None, + limit: Optional[int] = None, next_token: Optional[str] = None, ) -> dict[str, Any]: """ @@ -725,7 +725,7 @@ def describe_log_streams( Args: log_group (str): Name of the log group. log_stream_prefix (str): Prefix for log streams to include. - limit (int): Limit for number of log streams returned. + limit (Optional[int]): Limit for number of log streams returned. default is 50. next_token (Optional[str]): The token for the next set of items to return. Would have been received in a previous call. diff --git a/src/braket/circuits/circuit.py b/src/braket/circuits/circuit.py index 99b3e8ecb..81139e9e4 100644 --- a/src/braket/circuits/circuit.py +++ b/src/braket/circuits/circuit.py @@ -117,10 +117,10 @@ def method_from_subroutine(self, *args, **kwargs) -> SubroutineReturn: function_attr = getattr(cls, function_name) setattr(function_attr, "__doc__", func.__doc__) - def __init__(self, addable: AddableTypes = None, *args, **kwargs): + def __init__(self, addable: AddableTypes | None = None, *args, **kwargs): """ Args: - addable (AddableTypes): The item(s) to add to self. + addable (AddableTypes | None): The item(s) to add to self. Default = None. 
Raises: @@ -235,18 +235,18 @@ def parameters(self) -> set[FreeParameter]: def add_result_type( self, result_type: ResultType, - target: QubitSetInput = None, - target_mapping: dict[QubitInput, QubitInput] = None, + target: QubitSetInput | None = None, + target_mapping: dict[QubitInput, QubitInput] | None = None, ) -> Circuit: """ Add a requested result type to `self`, returns `self` for chaining ability. Args: result_type (ResultType): `ResultType` to add into `self`. - target (QubitSetInput): Target qubits for the + target (QubitSetInput | None): Target qubits for the `result_type`. Default = `None`. - target_mapping (dict[QubitInput, QubitInput]): A dictionary of + target_mapping (dict[QubitInput, QubitInput] | None): A dictionary of qubit mappings to apply to the `result_type.target`. Key is the qubit in `result_type.target` and the value is what the key will be changed to. Default = `None`. @@ -399,19 +399,19 @@ def _add_to_qubit_observable_set(self, result_type: ResultType) -> None: def add_instruction( self, instruction: Instruction, - target: QubitSetInput = None, - target_mapping: dict[QubitInput, QubitInput] = None, + target: QubitSetInput | None = None, + target_mapping: dict[QubitInput, QubitInput] | None = None, ) -> Circuit: """ Add an instruction to `self`, returns `self` for chaining ability. Args: instruction (Instruction): `Instruction` to add into `self`. - target (QubitSetInput): Target qubits for the + target (QubitSetInput | None): Target qubits for the `instruction`. If a single qubit gate, an instruction is created for every index in `target`. Default = `None`. - target_mapping (dict[QubitInput, QubitInput]): A dictionary of + target_mapping (dict[QubitInput, QubitInput] | None): A dictionary of qubit mappings to apply to the `instruction.target`. Key is the qubit in `instruction.target` and the value is what the key will be changed to. Default = `None`. @@ -491,19 +491,19 @@ def _check_for_params(self, instruction: Instruction) -> bool: def add_circuit( self, circuit: Circuit, - target: QubitSetInput = None, - target_mapping: dict[QubitInput, QubitInput] = None, + target: QubitSetInput | None = None, + target_mapping: dict[QubitInput, QubitInput] | None = None, ) -> Circuit: """ Add a `circuit` to self, returns self for chaining ability. Args: circuit (Circuit): Circuit to add into self. - target (QubitSetInput): Target qubits for the + target (QubitSetInput | None): Target qubits for the supplied circuit. This is a macro over `target_mapping`; `target` is converted to a `target_mapping` by zipping together a sorted `circuit.qubits` and `target`. Default = `None`. - target_mapping (dict[QubitInput, QubitInput]): A dictionary of + target_mapping (dict[QubitInput, QubitInput] | None): A dictionary of qubit mappings to apply to the qubits of `circuit.instructions`. Key is the qubit to map, and the value is what to change it to. Default = `None`. @@ -567,8 +567,8 @@ def add_circuit( def add_verbatim_box( self, verbatim_circuit: Circuit, - target: QubitSetInput = None, - target_mapping: dict[QubitInput, QubitInput] = None, + target: QubitSetInput | None = None, + target_mapping: dict[QubitInput, QubitInput] | None = None, ) -> Circuit: """ Add a verbatim `circuit` to self, that is, ensures that `circuit` is not modified in any way @@ -576,11 +576,11 @@ def add_verbatim_box( Args: verbatim_circuit (Circuit): Circuit to add into self. - target (QubitSetInput): Target qubits for the + target (QubitSetInput | None): Target qubits for the supplied circuit. 
This is a macro over `target_mapping`; `target` is converted to a `target_mapping` by zipping together a sorted `circuit.qubits` and `target`. Default = `None`. - target_mapping (dict[QubitInput, QubitInput]): A dictionary of + target_mapping (dict[QubitInput, QubitInput] | None): A dictionary of qubit mappings to apply to the qubits of `circuit.instructions`. Key is the qubit to map, and the value is what to change it to. Default = `None`. @@ -638,7 +638,7 @@ def apply_gate_noise( self, noise: Union[type[Noise], Iterable[type[Noise]]], target_gates: Optional[Union[type[Gate], Iterable[type[Gate]]]] = None, - target_unitary: np.ndarray = None, + target_unitary: Optional[np.ndarray] = None, target_qubits: Optional[QubitSetInput] = None, ) -> Circuit: """Apply `noise` to the circuit according to `target_gates`, `target_unitary` and @@ -667,7 +667,7 @@ def apply_gate_noise( to the circuit. target_gates (Optional[Union[type[Gate], Iterable[type[Gate]]]]): Gate class or List of Gate classes which `noise` is applied to. Default=None. - target_unitary (ndarray): matrix of the target unitary gates. Default=None. + target_unitary (Optional[ndarray]): matrix of the target unitary gates. Default=None. target_qubits (Optional[QubitSetInput]): Index or indices of qubit(s). Default=None. @@ -1097,7 +1097,7 @@ def diagram(self, circuit_diagram_class: type = AsciiCircuitDiagram) -> str: def to_ir( self, ir_type: IRType = IRType.JAQCD, - serialization_properties: SerializationProperties = None, + serialization_properties: Optional[SerializationProperties] = None, gate_definitions: Optional[dict[tuple[Gate, QubitSet], PulseSequence]] = None, ) -> Union[OpenQasmProgram, JaqcdProgram]: """ @@ -1107,9 +1107,10 @@ def to_ir( Args: ir_type (IRType): The IRType to use for converting the circuit object to its IR representation. - serialization_properties (SerializationProperties): The serialization properties to use - while serializing the object to the IR representation. The serialization properties - supplied must correspond to the supplied `ir_type`. Defaults to None. + serialization_properties (Optional[SerializationProperties]): The serialization + properties to use while serializing the object to the IR representation. The + serialization properties supplied must correspond to the supplied `ir_type`. + Defaults to None. gate_definitions (Optional[dict[tuple[Gate, QubitSet], PulseSequence]]): The calibration data for the device. default: None. @@ -1490,12 +1491,12 @@ def __eq__(self, other): ) return NotImplemented - def __call__(self, arg: Any = None, **kwargs) -> Circuit: + def __call__(self, arg: Any | None = None, **kwargs) -> Circuit: """ Implements the call function to easily make a bound Circuit. Args: - arg (Any): A value to bind to all parameters. Defaults to None and + arg (Any | None): A value to bind to all parameters. Defaults to None and can be overridden if the parameter is in kwargs. Returns: diff --git a/src/braket/circuits/compiler_directive.py b/src/braket/circuits/compiler_directive.py index d5b5591fe..628422c7e 100644 --- a/src/braket/circuits/compiler_directive.py +++ b/src/braket/circuits/compiler_directive.py @@ -48,20 +48,20 @@ def ascii_symbols(self) -> tuple[str, ...]: def to_ir( self, - target: QubitSet = None, + target: QubitSet | None = None, ir_type: IRType = IRType.JAQCD, - serialization_properties: SerializationProperties = None, + serialization_properties: SerializationProperties | None = None, **kwargs, ) -> Any: """Returns IR object of the compiler directive. 
Args: - target (QubitSet): target qubit(s). Defaults to None + target (QubitSet | None): target qubit(s). Defaults to None ir_type(IRType) : The IRType to use for converting the compiler directive object to its IR representation. Defaults to IRType.JAQCD. - serialization_properties (SerializationProperties): The serialization properties to use - while serializing the object to the IR representation. The serialization properties - supplied must correspond to the supplied `ir_type`. Defaults to None. + serialization_properties (SerializationProperties | None): The serialization properties + to use while serializing the object to the IR representation. The serialization + properties supplied must correspond to the supplied `ir_type`. Defaults to None. Returns: Any: IR object of the compiler directive. diff --git a/src/braket/circuits/gate.py b/src/braket/circuits/gate.py index 718e3344d..3c59409e5 100644 --- a/src/braket/circuits/gate.py +++ b/src/braket/circuits/gate.py @@ -70,7 +70,7 @@ def to_ir( self, target: QubitSet, ir_type: IRType = IRType.JAQCD, - serialization_properties: SerializationProperties = None, + serialization_properties: Optional[SerializationProperties] = None, *, control: Optional[QubitSet] = None, control_state: Optional[BasisStateInput] = None, @@ -82,9 +82,10 @@ def to_ir( target (QubitSet): target qubit(s). ir_type(IRType) : The IRType to use for converting the gate object to its IR representation. Defaults to IRType.JAQCD. - serialization_properties (SerializationProperties): The serialization properties to use - while serializing the object to the IR representation. The serialization properties - supplied must correspond to the supplied `ir_type`. Defaults to None. + serialization_properties (Optional[SerializationProperties]): The serialization + properties to use while serializing the object to the IR representation. The + serialization properties supplied must correspond to the supplied `ir_type`. + Defaults to None. control (Optional[QubitSet]): Control qubit(s). Only supported for OpenQASM. Default None. control_state (Optional[BasisStateInput]): Quantum state on which to control the diff --git a/src/braket/circuits/instruction.py b/src/braket/circuits/instruction.py index 18dc49e4a..0b2f90d01 100644 --- a/src/braket/circuits/instruction.py +++ b/src/braket/circuits/instruction.py @@ -37,7 +37,7 @@ class Instruction: def __init__( self, operator: InstructionOperator, - target: QubitSetInput = None, + target: Optional[QubitSetInput] = None, *, control: Optional[QubitSetInput] = None, control_state: Optional[BasisStateInput] = None, @@ -48,7 +48,8 @@ def __init__( Args: operator (InstructionOperator): Operator for the instruction. - target (QubitSetInput): Target qubits that the operator is applied to. Default is None. + target (Optional[QubitSetInput]): Target qubits that the operator is applied to. + Default is None. control (Optional[QubitSetInput]): Target qubits that the operator is controlled on. Default is None. control_state (Optional[BasisStateInput]): Quantum state on which to control the @@ -165,7 +166,7 @@ def adjoint(self) -> list[Instruction]: def to_ir( self, ir_type: IRType = IRType.JAQCD, - serialization_properties: SerializationProperties = None, + serialization_properties: SerializationProperties | None = None, ) -> Any: """ Converts the operator into the canonical intermediate representation. @@ -174,9 +175,9 @@ def to_ir( Args: ir_type(IRType) : The IRType to use for converting the instruction object to its IR representation. 
- serialization_properties (SerializationProperties): The serialization properties to use - while serializing the object to the IR representation. The serialization properties - supplied must correspond to the supplied `ir_type`. Defaults to None. + serialization_properties (SerializationProperties | None): The serialization properties + to use while serializing the object to the IR representation. The serialization + properties supplied must correspond to the supplied `ir_type`. Defaults to None. Returns: Any: IR object of the instruction. @@ -201,10 +202,10 @@ def ascii_symbols(self) -> tuple[str, ...]: def copy( self, - target_mapping: dict[QubitInput, QubitInput] = None, - target: QubitSetInput = None, - control_mapping: dict[QubitInput, QubitInput] = None, - control: QubitSetInput = None, + target_mapping: Optional[dict[QubitInput, QubitInput]] = None, + target: Optional[QubitSetInput] = None, + control_mapping: Optional[dict[QubitInput, QubitInput]] = None, + control: Optional[QubitSetInput] = None, control_state: Optional[BasisStateInput] = None, power: float = 1, ) -> Instruction: @@ -217,14 +218,16 @@ def copy( Same relationship holds for `control_mapping`. Args: - target_mapping (dict[QubitInput, QubitInput]): A dictionary of + target_mapping (Optional[dict[QubitInput, QubitInput]]): A dictionary of qubit mappings to apply to the target. Key is the qubit in this `target` and the value is what the key is changed to. Default = `None`. - target (QubitSetInput): Target qubits for the new instruction. Default is None. - control_mapping (dict[QubitInput, QubitInput]): A dictionary of + target (Optional[QubitSetInput]): Target qubits for the new instruction. + Default is None. + control_mapping (Optional[dict[QubitInput, QubitInput]]): A dictionary of qubit mappings to apply to the control. Key is the qubit in this `control` and the value is what the key is changed to. Default = `None`. - control (QubitSetInput): Control qubits for the new instruction. Default is None. + control (Optional[QubitSetInput]): Control qubits for the new instruction. + Default is None. control_state (Optional[BasisStateInput]): Quantum state on which to control the operation. Must be a binary sequence of same length as number of qubits in `control`. Will be ignored if `control` is not present. May be represented as a diff --git a/src/braket/circuits/moments.py b/src/braket/circuits/moments.py index a4c31339b..7cd8e0a9d 100644 --- a/src/braket/circuits/moments.py +++ b/src/braket/circuits/moments.py @@ -100,7 +100,7 @@ class Moments(Mapping[MomentsKey, Instruction]): Value: Instruction('operator': H, 'target': QubitSet([Qubit(1)])) """ - def __init__(self, instructions: Iterable[Instruction] = None): + def __init__(self, instructions: Iterable[Instruction] | None = None): self._moments: OrderedDict[MomentsKey, Instruction] = OrderedDict() self._max_times: dict[Qubit, int] = {} self._qubits = QubitSet() @@ -283,13 +283,13 @@ def values(self) -> ValuesView[Instruction]: self.sort_moments() return self._moments.values() - def get(self, key: MomentsKey, default: Any = None) -> Instruction: + def get(self, key: MomentsKey, default: Any | None = None) -> Instruction: """ Get the instruction in self by key. Args: key (MomentsKey): Key of the instruction to fetch. - default (Any): Value to return if `key` is not in `moments`. Default = `None`. + default (Any | None): Value to return if `key` is not in `moments`. Default = `None`. Returns: Instruction: `moments[key]` if `key` in `moments`, else `default` is returned. 
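The `target` and `target_mapping` parameters whose annotations change throughout `circuit.py` above are easiest to read in context. A minimal usage sketch, assuming the Braket SDK is installed (the `h`/`cnot` builder methods and the printable diagram are standard SDK behavior not shown in this diff):

```python
from braket.circuits import Circuit

bell = Circuit().h(0).cnot(0, 1)

# `target` is a macro over `target_mapping`: sorted qubits of `bell` are zipped with [2, 3].
shifted = Circuit().add_circuit(bell, target=[2, 3])

# Equivalent explicit mapping; both parameters may also simply be left as None.
remapped = Circuit().add_circuit(bell, target_mapping={0: 2, 1: 3})

print(shifted)  # ASCII diagram with the gates applied to qubits 2 and 3
```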
diff --git a/src/braket/circuits/noise.py b/src/braket/circuits/noise.py index 8e5b9cc73..af1bb422f 100644 --- a/src/braket/circuits/noise.py +++ b/src/braket/circuits/noise.py @@ -68,7 +68,7 @@ def to_ir( self, target: QubitSet, ir_type: IRType = IRType.JAQCD, - serialization_properties: SerializationProperties = None, + serialization_properties: SerializationProperties | None = None, ) -> Any: """Returns IR object of quantum operator and target @@ -76,9 +76,9 @@ def to_ir( target (QubitSet): target qubit(s) ir_type(IRType) : The IRType to use for converting the noise object to its IR representation. Defaults to IRType.JAQCD. - serialization_properties (SerializationProperties): The serialization properties to use - while serializing the object to the IR representation. The serialization properties - supplied must correspond to the supplied `ir_type`. Defaults to None. + serialization_properties (SerializationProperties | None): The serialization properties + to use while serializing the object to the IR representation. The serialization + properties supplied must correspond to the supplied `ir_type`. Defaults to None. Returns: Any: IR object of the quantum operator and target diff --git a/src/braket/circuits/observable.py b/src/braket/circuits/observable.py index 217bd5972..03cd0714e 100644 --- a/src/braket/circuits/observable.py +++ b/src/braket/circuits/observable.py @@ -47,19 +47,19 @@ def _unscaled(self) -> Observable: def to_ir( self, - target: QubitSet = None, + target: QubitSet | None = None, ir_type: IRType = IRType.JAQCD, - serialization_properties: SerializationProperties = None, + serialization_properties: SerializationProperties | None = None, ) -> Union[str, list[Union[str, list[list[list[float]]]]]]: """Returns the IR representation for the observable Args: - target (QubitSet): target qubit(s). Defaults to None. + target (QubitSet | None): target qubit(s). Defaults to None. ir_type(IRType) : The IRType to use for converting the result type object to its IR representation. Defaults to IRType.JAQCD. - serialization_properties (SerializationProperties): The serialization properties to use - while serializing the object to the IR representation. The serialization properties - supplied must correspond to the supplied `ir_type`. Defaults to None. + serialization_properties (SerializationProperties | None): The serialization properties + to use while serializing the object to the IR representation. The serialization + properties supplied must correspond to the supplied `ir_type`. Defaults to None. Returns: Union[str, list[Union[str, list[list[list[float]]]]]]: The IR representation for @@ -90,7 +90,9 @@ def _to_jaqcd(self) -> list[Union[str, list[list[list[float]]]]]: raise NotImplementedError("to_jaqcd has not been implemented yet.") def _to_openqasm( - self, serialization_properties: OpenQASMSerializationProperties, target: QubitSet = None + self, + serialization_properties: OpenQASMSerializationProperties, + target: QubitSet | None = None, ) -> str: """ Returns the openqasm string representation of the result type. @@ -98,7 +100,7 @@ def _to_openqasm( Args: serialization_properties (OpenQASMSerializationProperties): The serialization properties to use while serializing the object to the IR representation. - target (QubitSet): target qubit(s). Defaults to None. + target (QubitSet | None): target qubit(s). Defaults to None. Returns: str: Representing the openqasm representation of the result type. 
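All of the `to_ir` methods touched above share the same contract for `serialization_properties`: leave it as `None`, or pass a properties object that matches the chosen `ir_type`. A sketch of that call pattern, assuming the import locations used by current SDK releases (`braket.circuits.serialization`):

```python
from braket.circuits import Circuit
from braket.circuits.serialization import IRType, OpenQASMSerializationProperties

circ = Circuit().h(0).cnot(0, 1).probability()

jaqcd_program = circ.to_ir()  # ir_type defaults to IRType.JAQCD; serialization_properties to None

openqasm_program = circ.to_ir(
    ir_type=IRType.OPENQASM,
    serialization_properties=OpenQASMSerializationProperties(),  # must correspond to ir_type
)
print(openqasm_program.source)
```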
diff --git a/src/braket/circuits/result_type.py b/src/braket/circuits/result_type.py index a59d9ff3d..c6877262e 100644 --- a/src/braket/circuits/result_type.py +++ b/src/braket/circuits/result_type.py @@ -67,7 +67,7 @@ def name(self) -> str: def to_ir( self, ir_type: IRType = IRType.JAQCD, - serialization_properties: SerializationProperties = None, + serialization_properties: SerializationProperties | None = None, **kwargs, ) -> Any: """Returns IR object of the result type @@ -75,9 +75,9 @@ def to_ir( Args: ir_type(IRType) : The IRType to use for converting the result type object to its IR representation. Defaults to IRType.JAQCD. - serialization_properties (SerializationProperties): The serialization properties to use - while serializing the object to the IR representation. The serialization properties - supplied must correspond to the supplied `ir_type`. Defaults to None. + serialization_properties (SerializationProperties | None): The serialization properties + to use while serializing the object to the IR representation. The serialization + properties supplied must correspond to the supplied `ir_type`. Defaults to None. Returns: Any: IR object of the result type @@ -118,7 +118,9 @@ def _to_openqasm(self, serialization_properties: OpenQASMSerializationProperties raise NotImplementedError("to_openqasm has not been implemented yet.") def copy( - self, target_mapping: dict[QubitInput, QubitInput] = None, target: QubitSetInput = None + self, + target_mapping: dict[QubitInput, QubitInput] | None = None, + target: QubitSetInput | None = None, ) -> ResultType: """ Return a shallow copy of the result type. @@ -128,10 +130,10 @@ def copy( qubits. This is useful apply an instruction to a circuit and change the target qubits. Args: - target_mapping (dict[QubitInput, QubitInput]): A dictionary of + target_mapping (dict[QubitInput, QubitInput] | None): A dictionary of qubit mappings to apply to the target. Key is the qubit in this `target` and the value is what the key is changed to. Default = `None`. - target (QubitSetInput): Target qubits for the new instruction. + target (QubitSetInput | None): Target qubits for the new instruction. Returns: ResultType: A shallow copy of the result type. @@ -188,14 +190,14 @@ class ObservableResultType(ResultType): """ def __init__( - self, ascii_symbols: list[str], observable: Observable, target: QubitSetInput = None + self, ascii_symbols: list[str], observable: Observable, target: QubitSetInput | None = None ): """ Args: ascii_symbols (list[str]): ASCII string symbols for the result type. This is used when printing a diagram of circuits. observable (Observable): the observable for the result type - target (QubitSetInput): Target qubits that the + target (QubitSetInput | None): Target qubits that the result type is requested for. Default is `None`, which means the observable must only operate on 1 qubit and it will be applied to all qubits in parallel @@ -287,8 +289,8 @@ def __init__( self, ascii_symbols: list[str], observable: Observable, - target: QubitSetInput = None, - parameters: list[Union[str, FreeParameter]] = None, + target: QubitSetInput | None = None, + parameters: list[Union[str, FreeParameter]] | None = None, ): super().__init__(ascii_symbols, observable, target) @@ -303,10 +305,10 @@ def __init__( ascii_symbols (list[str]): ASCII string symbols for the result type. This is used when printing a diagram of circuits. observable (Observable): the observable for the result type. 
- target (QubitSetInput): Target qubits that the result type is requested for. + target (QubitSetInput | None): Target qubits that the result type is requested for. Default is `None`, which means the observable must only operate on 1 qubit and it will be applied to all qubits in parallel. - parameters (list[Union[str, FreeParameter]]): List of string inputs or + parameters (list[Union[str, FreeParameter]] | None): List of string inputs or FreeParameter objects. These inputs will be used as parameters for gradient calculation. Default: `all`. diff --git a/src/braket/circuits/result_types.py b/src/braket/circuits/result_types.py index cfe216164..0a1a1c630 100644 --- a/src/braket/circuits/result_types.py +++ b/src/braket/circuits/result_types.py @@ -91,10 +91,10 @@ class DensityMatrix(ResultType): This is available on simulators only when `shots=0`. """ - def __init__(self, target: QubitSetInput = None): + def __init__(self, target: QubitSetInput | None = None): """ Args: - target (QubitSetInput): The target qubits + target (QubitSetInput | None): The target qubits of the reduced density matrix. Default is `None`, and the full density matrix is returned. @@ -134,10 +134,10 @@ def _to_openqasm(self, serialization_properties: OpenQASMSerializationProperties @staticmethod @circuit.subroutine(register=True) - def density_matrix(target: QubitSetInput = None) -> ResultType: + def density_matrix(target: QubitSetInput | None = None) -> ResultType: """Registers this function into the circuit class. Args: - target (QubitSetInput): The target qubits + target (QubitSetInput | None): The target qubits of the reduced density matrix. Default is `None`, and the full density matrix is returned. @@ -178,20 +178,20 @@ class AdjointGradient(ObservableParameterResultType): def __init__( self, observable: Observable, - target: list[QubitSetInput] = None, - parameters: list[Union[str, FreeParameter]] = None, + target: list[QubitSetInput] | None = None, + parameters: list[Union[str, FreeParameter]] | None = None, ): """ Args: observable (Observable): The expectation value of this observable is the function against which parameters in the gradient are differentiated. - target (list[QubitSetInput]): Target qubits that the result type is requested for. - Each term in the target list should have the same number of qubits as the + target (list[QubitSetInput] | None): Target qubits that the result type is requested + for. Each term in the target list should have the same number of qubits as the corresponding term in the observable. Default is `None`, which means the observable must operate only on 1 qubit and it is applied to all qubits in parallel. - parameters (list[Union[str, FreeParameter]]): The free parameters in the circuit to - differentiate with respect to. Default: `all`. + parameters (list[Union[str, FreeParameter]] | None): The free parameters in the circuit + to differentiate with respect to. Default: `all`. Raises: ValueError: If the observable's qubit count does not equal the number of target @@ -240,21 +240,21 @@ def _to_openqasm(self, serialization_properties: OpenQASMSerializationProperties @circuit.subroutine(register=True) def adjoint_gradient( observable: Observable, - target: list[QubitSetInput] = None, - parameters: list[Union[str, FreeParameter]] = None, + target: list[QubitSetInput] | None = None, + parameters: list[Union[str, FreeParameter]] | None = None, ) -> ResultType: """Registers this function into the circuit class. 
Args: observable (Observable): The expectation value of this observable is the function against which parameters in the gradient are differentiated. - target (list[QubitSetInput]): Target qubits that the result type is requested for. - Each term in the target list should have the same number of qubits as the + target (list[QubitSetInput] | None): Target qubits that the result type is requested + for. Each term in the target list should have the same number of qubits as the corresponding term in the observable. Default is `None`, which means the observable must operate only on 1 qubit and it is applied to all qubits in parallel. - parameters (list[Union[str, FreeParameter]]): The free parameters in the circuit to - differentiate with respect to. Default: `all`. + parameters (list[Union[str, FreeParameter]] | None): The free parameters in the circuit + to differentiate with respect to. Default: `all`. Returns: ResultType: gradient computed via adjoint differentiation as a requested result type @@ -361,10 +361,10 @@ class Probability(ResultType): only on simulators and represents the exact result. """ - def __init__(self, target: QubitSetInput = None): + def __init__(self, target: QubitSetInput | None = None): """ Args: - target (QubitSetInput): The target qubits that the + target (QubitSetInput | None): The target qubits that the result type is requested for. Default is `None`, which means all qubits for the circuit. @@ -404,11 +404,11 @@ def _to_openqasm(self, serialization_properties: OpenQASMSerializationProperties @staticmethod @circuit.subroutine(register=True) - def probability(target: QubitSetInput = None) -> ResultType: + def probability(target: QubitSetInput | None = None) -> ResultType: """Registers this function into the circuit class. Args: - target (QubitSetInput): The target qubits that the + target (QubitSetInput | None): The target qubits that the result type is requested for. Default is `None`, which means all qubits for the circuit. @@ -451,11 +451,11 @@ class Expectation(ObservableResultType): See :mod:`braket.circuits.observables` module for all of the supported observables. """ - def __init__(self, observable: Observable, target: QubitSetInput = None): + def __init__(self, observable: Observable, target: QubitSetInput | None = None): """ Args: observable (Observable): the observable for the result type - target (QubitSetInput): Target qubits that the + target (QubitSetInput | None): Target qubits that the result type is requested for. Default is `None`, which means the observable must operate only on 1 qubit and it is applied to all qubits in parallel. @@ -493,12 +493,12 @@ def _to_openqasm(self, serialization_properties: OpenQASMSerializationProperties @staticmethod @circuit.subroutine(register=True) - def expectation(observable: Observable, target: QubitSetInput = None) -> ResultType: + def expectation(observable: Observable, target: QubitSetInput | None = None) -> ResultType: """Registers this function into the circuit class. Args: observable (Observable): the observable for the result type - target (QubitSetInput): Target qubits that the + target (QubitSetInput | None): Target qubits that the result type is requested for. Default is `None`, which means the observable must operate only on 1 qubit and it is applied to all qubits in parallel. @@ -526,11 +526,11 @@ class Sample(ObservableResultType): See :mod:`braket.circuits.observables` module for all of the supported observables. 
""" - def __init__(self, observable: Observable, target: QubitSetInput = None): + def __init__(self, observable: Observable, target: QubitSetInput | None = None): """ Args: observable (Observable): the observable for the result type - target (QubitSetInput): Target qubits that the + target (QubitSetInput | None): Target qubits that the result type is requested for. Default is `None`, which means the observable must operate only on 1 qubit and it is applied to all qubits in parallel. @@ -568,12 +568,12 @@ def _to_openqasm(self, serialization_properties: OpenQASMSerializationProperties @staticmethod @circuit.subroutine(register=True) - def sample(observable: Observable, target: QubitSetInput = None) -> ResultType: + def sample(observable: Observable, target: QubitSetInput | None = None) -> ResultType: """Registers this function into the circuit class. Args: observable (Observable): the observable for the result type - target (QubitSetInput): Target qubits that the + target (QubitSetInput | None): Target qubits that the result type is requested for. Default is `None`, which means the observable must operate only on 1 qubit and it is applied to all qubits in parallel. @@ -602,11 +602,11 @@ class Variance(ObservableResultType): See :mod:`braket.circuits.observables` module for all of the supported observables. """ - def __init__(self, observable: Observable, target: QubitSetInput = None): + def __init__(self, observable: Observable, target: QubitSetInput | None = None): """ Args: observable (Observable): the observable for the result type - target (QubitSetInput): Target qubits that the + target (QubitSetInput | None): Target qubits that the result type is requested for. Default is `None`, which means the observable must operate only on 1 qubit and it is applied to all qubits in parallel. @@ -644,12 +644,12 @@ def _to_openqasm(self, serialization_properties: OpenQASMSerializationProperties @staticmethod @circuit.subroutine(register=True) - def variance(observable: Observable, target: QubitSetInput = None) -> ResultType: + def variance(observable: Observable, target: QubitSetInput | None = None) -> ResultType: """Registers this function into the circuit class. Args: observable (Observable): the observable for the result type - target (QubitSetInput): Target qubits that the + target (QubitSetInput | None): Target qubits that the result type is requested for. Default is `None`, which means the observable must only operate on 1 qubit and it will be applied to all qubits in parallel diff --git a/src/braket/jobs/config.py b/src/braket/jobs/config.py index 432e740e4..a598388e4 100644 --- a/src/braket/jobs/config.py +++ b/src/braket/jobs/config.py @@ -63,13 +63,13 @@ class S3DataSourceConfig: def __init__( self, s3_data: str, - content_type: str = None, + content_type: str | None = None, ): """Create a definition for input data used by a Braket Hybrid job. Args: s3_data (str): Defines the location of s3 data to train on. - content_type (str): MIME type of the input data (default: None). + content_type (str | None): MIME type of the input data (default: None). 
""" self.config = { "dataSource": { diff --git a/src/braket/jobs/data_persistence.py b/src/braket/jobs/data_persistence.py index 6ef5a6d18..f2ec9b6fa 100644 --- a/src/braket/jobs/data_persistence.py +++ b/src/braket/jobs/data_persistence.py @@ -65,7 +65,9 @@ def save_job_checkpoint( f.write(persisted_data.json()) -def load_job_checkpoint(job_name: str = None, checkpoint_file_suffix: str = "") -> dict[str, Any]: +def load_job_checkpoint( + job_name: str | None = None, checkpoint_file_suffix: str = "" +) -> dict[str, Any]: """ Loads the job checkpoint data stored for the job named 'job_name', with the checkpoint file that ends with the `checkpoint_file_suffix`. The `job_name` can refer to any job whose @@ -78,7 +80,7 @@ def load_job_checkpoint(job_name: str = None, checkpoint_file_suffix: str = "") Args: - job_name (str): str that specifies the name of the job whose checkpoints + job_name (str | None): str that specifies the name of the job whose checkpoints are to be loaded. Default: current job name. checkpoint_file_suffix (str): str specifying the file suffix that is used to @@ -110,7 +112,7 @@ def load_job_checkpoint(job_name: str = None, checkpoint_file_suffix: str = "") return deserialized_data -def _load_persisted_data(filename: str | Path = None) -> PersistedJobData: +def _load_persisted_data(filename: str | Path | None = None) -> PersistedJobData: filename = filename or Path(get_results_dir()) / "results.json" try: with open(filename, mode="r") as f: @@ -122,12 +124,12 @@ def _load_persisted_data(filename: str | Path = None) -> PersistedJobData: ) -def load_job_result(filename: str | Path = None) -> dict[str, Any]: +def load_job_result(filename: str | Path | None = None) -> dict[str, Any]: """ Loads job result of currently running job. Args: - filename (str | Path): Location of job results. Default `results.json` in job + filename (str | Path | None): Location of job results. Default `results.json` in job results directory in a job instance or in working directory locally. This file must be in the format used by `save_job_result`. @@ -141,7 +143,7 @@ def load_job_result(filename: str | Path = None) -> dict[str, Any]: def save_job_result( result_data: dict[str, Any] | Any, - data_format: PersistedJobDataFormat = None, + data_format: PersistedJobDataFormat | None = None, ) -> None: """ Saves the `result_data` to the local output directory that is specified by the container @@ -151,12 +153,11 @@ def save_job_result( Note: This function for storing the results is only for use inside the job container as it writes data to directories and references env variables set in the containers. - Args: result_data (dict[str, Any] | Any): Dict that specifies the result data to be persisted. If result data is not a dict, then it will be wrapped as `{"result": result_data}`. - data_format (PersistedJobDataFormat): The data format used to serialize the + data_format (PersistedJobDataFormat | None): The data format used to serialize the values. Note that for `PICKLED` data formats, the values are base64 encoded after serialization. Default: PersistedJobDataFormat.PLAINTEXT. 
""" diff --git a/src/braket/jobs/hybrid_job.py b/src/braket/jobs/hybrid_job.py index da0b8436a..ae17c2715 100644 --- a/src/braket/jobs/hybrid_job.py +++ b/src/braket/jobs/hybrid_job.py @@ -45,23 +45,23 @@ def hybrid_job( *, - device: str, - include_modules: str | ModuleType | Iterable[str | ModuleType] = None, - dependencies: str | Path | list[str] = None, + device: str | None, + include_modules: str | ModuleType | Iterable[str | ModuleType] | None = None, + dependencies: str | Path | list[str] | None = None, local: bool = False, - job_name: str = None, - image_uri: str = None, - input_data: str | dict | S3DataSourceConfig = None, + job_name: str | None = None, + image_uri: str | None = None, + input_data: str | dict | S3DataSourceConfig | None = None, wait_until_complete: bool = False, - instance_config: InstanceConfig = None, - distribution: str = None, - copy_checkpoints_from_job: str = None, - checkpoint_config: CheckpointConfig = None, - role_arn: str = None, - stopping_condition: StoppingCondition = None, - output_data_config: OutputDataConfig = None, - aws_session: AwsSession = None, - tags: dict[str, str] = None, + instance_config: InstanceConfig | None = None, + distribution: str | None = None, + copy_checkpoints_from_job: str | None = None, + checkpoint_config: CheckpointConfig | None = None, + role_arn: str | None = None, + stopping_condition: StoppingCondition | None = None, + output_data_config: OutputDataConfig | None = None, + aws_session: AwsSession | None = None, + tags: dict[str, str] | None = None, logger: Logger = getLogger(__name__), ) -> Callable: """Defines a hybrid job by decorating the entry point function. The job will be created @@ -73,34 +73,34 @@ def hybrid_job( `copy_checkpoints_from_job`, `stopping_condition`, `tags`, and `logger`. Args: - device (str): Device ARN of the QPU device that receives priority quantum + device (str | None): Device ARN of the QPU device that receives priority quantum task queueing once the hybrid job begins running. Each QPU has a separate hybrid jobs queue so that only one hybrid job is running at a time. The device string is accessible in the hybrid job instance as the environment variable "AMZN_BRAKET_DEVICE_ARN". When using embedded simulators, you may provide the device argument as string of the form: "local:/" or `None`. - include_modules (str | ModuleType | Iterable[str | ModuleType]): Either a + include_modules (str | ModuleType | Iterable[str | ModuleType] | None): Either a single module or module name or a list of module or module names referring to local modules to be included. Any references to members of these modules in the hybrid job algorithm code will be serialized as part of the algorithm code. Default: `[]` - dependencies (str | Path | list[str]): Path (absolute or relative) to a requirements.txt - file, or alternatively a list of strings, with each string being a `requirement - specifier `_, to be used for the hybrid job. local (bool): Whether to use local mode for the hybrid job. Default: `False` - job_name (str): A string that specifies the name with which the job is created. + job_name (str | None): A string that specifies the name with which the job is created. Allowed pattern for job name: `^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,50}$`. Defaults to f'{decorated-function-name}-{timestamp}'. - image_uri (str): A str that specifies the ECR image to use for executing the job. + image_uri (str | None): A str that specifies the ECR image to use for executing the job. 
`retrieve_image()` function may be used for retrieving the ECR image URIs for the containers supported by Braket. Default: ``. - input_data (str | dict | S3DataSourceConfig): Information about the training + input_data (str | dict | S3DataSourceConfig | None): Information about the training data. Dictionary maps channel names to local paths or S3 URIs. Contents found at any local paths will be uploaded to S3 at f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}'. If a local @@ -112,41 +112,41 @@ def hybrid_job( This would tail the job logs as it waits. Otherwise `False`. Ignored if using local mode. Default: `False`. - instance_config (InstanceConfig): Configuration of the instance(s) for running the + instance_config (InstanceConfig | None): Configuration of the instance(s) for running the classical code for the hybrid job. Default: `InstanceConfig(instanceType='ml.m5.large', instanceCount=1, volumeSizeInGB=30)`. - distribution (str): A str that specifies how the job should be distributed. + distribution (str | None): A str that specifies how the job should be distributed. If set to "data_parallel", the hyperparameters for the job will be set to use data parallelism features for PyTorch or TensorFlow. Default: `None`. - copy_checkpoints_from_job (str): A str that specifies the job ARN whose + copy_checkpoints_from_job (str | None): A str that specifies the job ARN whose checkpoint you want to use in the current job. Specifying this value will copy over the checkpoint data from `use_checkpoints_from_job`'s checkpoint_config s3Uri to the current job's checkpoint_config s3Uri, making it available at checkpoint_config.localPath during the job execution. Default: `None` - checkpoint_config (CheckpointConfig): Configuration that specifies the + checkpoint_config (CheckpointConfig | None): Configuration that specifies the location where checkpoint data is stored. Default: `CheckpointConfig(localPath='/opt/jobs/checkpoints', s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints')`. - role_arn (str): A str providing the IAM role ARN used to execute the + role_arn (str | None): A str providing the IAM role ARN used to execute the script. Default: IAM role returned by AwsSession's `get_default_jobs_role()`. - stopping_condition (StoppingCondition): The maximum length of time, in seconds, + stopping_condition (StoppingCondition | None): The maximum length of time, in seconds, and the maximum number of tasks that a job can run before being forcefully stopped. Default: StoppingCondition(maxRuntimeInSeconds=5 * 24 * 60 * 60). - output_data_config (OutputDataConfig): Specifies the location for the output of + output_data_config (OutputDataConfig | None): Specifies the location for the output of the job. Default: `OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data', kmsKeyId=None)`. - aws_session (AwsSession): AwsSession for connecting to AWS Services. + aws_session (AwsSession | None): AwsSession for connecting to AWS Services. Default: AwsSession() - tags (dict[str, str]): Dict specifying the key-value pairs for tagging this job. + tags (dict[str, str] | None): Dict specifying the key-value pairs for tagging this job. Default: {}. 
diff --git a/src/braket/jobs/local/local_job.py b/src/braket/jobs/local/local_job.py
index dc9b09e85..f516d9693 100644
--- a/src/braket/jobs/local/local_job.py
+++ b/src/braket/jobs/local/local_job.py
@@ -38,16 +38,16 @@ def create(
         cls,
         device: str,
         source_module: str,
-        entry_point: str = None,
-        image_uri: str = None,
-        job_name: str = None,
-        code_location: str = None,
-        role_arn: str = None,
-        hyperparameters: dict[str, Any] = None,
-        input_data: str | dict | S3DataSourceConfig = None,
-        output_data_config: OutputDataConfig = None,
-        checkpoint_config: CheckpointConfig = None,
-        aws_session: AwsSession = None,
+        entry_point: str | None = None,
+        image_uri: str | None = None,
+        job_name: str | None = None,
+        code_location: str | None = None,
+        role_arn: str | None = None,
+        hyperparameters: dict[str, Any] | None = None,
+        input_data: str | dict | S3DataSourceConfig | None = None,
+        output_data_config: OutputDataConfig | None = None,
+        checkpoint_config: CheckpointConfig | None = None,
+        aws_session: AwsSession | None = None,
         local_container_update: bool = True,
     ) -> LocalQuantumJob:
         """Creates and runs hybrid job by setting up and running the customer script in a local
@@ -65,32 +65,34 @@ def create(
                tarred and uploaded. If `source_module` is an S3 URI, it must point to a tar.gz
                file. Otherwise, source_module may be a file or directory.
-            entry_point (str): A str that specifies the entry point of the hybrid job, relative to
-                the source module. The entry point must be in the format
+            entry_point (str | None): A str that specifies the entry point of the hybrid job,
+                relative to the source module. The entry point must be in the format
                `importable.module` or `importable.module:callable`. For example,
                `source_module.submodule:start_here` indicates the `start_here` function
                contained in `source_module.submodule`. If source_module is an S3 URI,
                entry point must be given. Default: source_module's name
-            image_uri (str): A str that specifies the ECR image to use for executing the hybrid job.
-                `image_uris.retrieve_image()` function may be used for retrieving the ECR image URIs
-                for the containers supported by Braket. Default = ``.
+            image_uri (str | None): A str that specifies the ECR image to use for executing the
+                hybrid job. `image_uris.retrieve_image()` function may be used for retrieving the
+                ECR image URIs for the containers supported by Braket.
+                Default = ``.
-            job_name (str): A str that specifies the name with which the hybrid job is created.
+            job_name (str | None): A str that specifies the name with which the hybrid job is
+                created.
                Default: f'{image_uri_type}-{timestamp}'.
-            code_location (str): The S3 prefix URI where custom code will be uploaded.
+            code_location (str | None): The S3 prefix URI where custom code will be uploaded.
                Default: f's3://{default_bucket_name}/jobs/{job_name}/script'.
-            role_arn (str): This field is currently not used for local hybrid jobs. Local hybrid
-                jobs will use the current role's credentials. This may be subject to change.
+            role_arn (str | None): This field is currently not used for local hybrid jobs. Local
+                hybrid jobs will use the current role's credentials. This may be subject to change.
-            hyperparameters (dict[str, Any]): Hyperparameters accessible to the hybrid job.
+            hyperparameters (dict[str, Any] | None): Hyperparameters accessible to the hybrid job.
                The hyperparameters are made accessible as a Dict[str, str] to the hybrid job.
                For convenience, this accepts other types for keys and values, but `str()`
                is called to convert them before being passed on. Default: None.
-            input_data (str | dict | S3DataSourceConfig): Information about the training
+            input_data (str | dict | S3DataSourceConfig | None): Information about the training
                data. Dictionary maps channel names to local paths or S3 URIs. Contents found
                at any local paths will be uploaded to S3 at
                f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}. If a local
@@ -98,17 +100,17 @@ def create(
                channel name "input".
                Default: {}.
-            output_data_config (OutputDataConfig): Specifies the location for the output of the
-                hybrid job.
+            output_data_config (OutputDataConfig | None): Specifies the location for the output of
+                the hybrid job.
                Default: OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data',
                kmsKeyId=None).
-            checkpoint_config (CheckpointConfig): Configuration that specifies the location where
-                checkpoint data is stored.
+            checkpoint_config (CheckpointConfig | None): Configuration that specifies the location
+                where checkpoint data is stored.
                Default: CheckpointConfig(localPath='/opt/jobs/checkpoints',
                s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints').
-            aws_session (AwsSession): AwsSession for connecting to AWS Services.
+            aws_session (AwsSession | None): AwsSession for connecting to AWS Services.
                Default: AwsSession()
            local_container_update (bool): Perform an update, if available, from ECR to the local
@@ -163,11 +165,12 @@ def create(
            run_log = container.run_log
        return LocalQuantumJob(f"local:job/{job_name}", run_log)
-    def __init__(self, arn: str, run_log: str = None):
+    def __init__(self, arn: str, run_log: str | None = None):
        """
        Args:
            arn (str): The ARN of the hybrid job.
-            run_log (str): The container output log of running the hybrid job with the given arn.
+            run_log (str | None): The container output log of running the hybrid job with the
+                given arn.
        """
        if not arn.startswith("local:job/"):
            raise ValueError(f"Arn {arn} is not a valid local job arn")
@@ -235,14 +238,14 @@ def cancel(self) -> str:
     def download_result(
         self,
-        extract_to: str = None,
+        extract_to: str | None = None,
         poll_timeout_seconds: float = QuantumJob.DEFAULT_RESULTS_POLL_TIMEOUT,
         poll_interval_seconds: float = QuantumJob.DEFAULT_RESULTS_POLL_INTERVAL,
     ) -> None:
        """When running the hybrid job in local mode, results are automatically stored locally.
        Args:
-            extract_to (str): The directory to which the results are extracted. The results
+            extract_to (str | None): The directory to which the results are extracted. The results
                are extracted to a folder titled with the hybrid job name within this directory.
                Default= `Current working directory`.
            poll_timeout_seconds (float): The polling timeout, in seconds, for `result()`.
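A generic before/after sketch, not taken from the SDK, of why these signatures move from the implicit-Optional `str = None` to the explicit `str | None = None`: strict type checkers reject the former, while the union form makes the fall-back branch well typed. The helper name and default value below are illustrative only:

from __future__ import annotations


def resolve_job_name(job_name: str | None = None) -> str:
    # Hypothetical helper (not in braket): None is an explicit, checker-visible part
    # of the contract, so falling back to a generated default is well typed.
    return job_name if job_name is not None else "braket-job-default"


assert resolve_job_name() == "braket-job-default"
assert resolve_job_name("my-job") == "my-job"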
diff --git a/src/braket/jobs/local/local_job_container.py b/src/braket/jobs/local/local_job_container.py
index f924db47f..ea5625623 100644
--- a/src/braket/jobs/local/local_job_container.py
+++ b/src/braket/jobs/local/local_job_container.py
@@ -10,6 +10,8 @@
 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
+from __future__ import annotations
+
 import base64
 import re
 import subprocess
@@ -29,7 +31,7 @@ class _LocalJobContainer(object):
     def __init__(
         self,
         image_uri: str,
-        aws_session: AwsSession = None,
+        aws_session: AwsSession | None = None,
         logger: Logger = getLogger(__name__),
         force_update: bool = False,
     ):
@@ -39,7 +41,7 @@ def __init__(
        The function "end_session" must be called when the container is no longer needed.
        Args:
            image_uri (str): The URI of the container image to run.
-            aws_session (AwsSession): AwsSession for connecting to AWS Services.
+            aws_session (AwsSession | None): AwsSession for connecting to AWS Services.
                Default: AwsSession()
            logger (Logger): Logger object with which to write logs. Default: `getLogger(__name__)`
diff --git a/src/braket/jobs/metrics_data/cwl_insights_metrics_fetcher.py b/src/braket/jobs/metrics_data/cwl_insights_metrics_fetcher.py
index 94ed5499d..b32cd6b9c 100644
--- a/src/braket/jobs/metrics_data/cwl_insights_metrics_fetcher.py
+++ b/src/braket/jobs/metrics_data/cwl_insights_metrics_fetcher.py
@@ -11,6 +11,8 @@
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
+from __future__ import annotations
+
 import time
 from logging import Logger, getLogger
 from typing import Any, Dict, List, Optional, Union
@@ -133,8 +135,8 @@ def get_metrics_for_job(
         job_name: str,
         metric_type: MetricType = MetricType.TIMESTAMP,
         statistic: MetricStatistic = MetricStatistic.MAX,
-        job_start_time: int = None,
-        job_end_time: int = None,
+        job_start_time: int | None = None,
+        job_end_time: int | None = None,
     ) -> Dict[str, List[Union[str, float, int]]]:
        """
        Synchronously retrieves all the algorithm metrics logged by a given Hybrid Job.
@@ -145,10 +147,10 @@ def get_metrics_for_job(
            metric_type (MetricType): The type of metrics to get. Default is MetricType.TIMESTAMP.
            statistic (MetricStatistic): The statistic to determine which metric value to use
                when there is a conflict. Default is MetricStatistic.MAX.
-            job_start_time (int): The time when the hybrid job started.
+            job_start_time (int | None): The time when the hybrid job started.
                Default: 3 hours before job_end_time.
-            job_end_time (int): If the hybrid job is complete, this should be the time at which the
-                hybrid job finished. Default: current time.
+            job_end_time (int | None): If the hybrid job is complete, this should be the time at
+                which the hybrid job finished. Default: current time.
        Returns:
            Dict[str, List[Union[str, float, int]]] : The metrics data, where the keys
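The `from __future__ import annotations` lines added in these files are what keep the `int | None` spelling importable on Python versions older than 3.10: with postponed evaluation, annotations are stored as strings and never evaluated at runtime. A self-contained sketch of the idea, using a hypothetical helper (not in the SDK) that mirrors the defaulting described in `get_metrics_for_job`'s docstring, where the end time defaults to now and the start time to three hours earlier:

from __future__ import annotations

import time


def metrics_window(job_start_time: int | None = None, job_end_time: int | None = None) -> tuple[int, int]:
    # Because annotations are postponed, "int | None" and "tuple[int, int]" also parse on 3.8/3.9.
    end = job_end_time if job_end_time is not None else int(time.time())
    start = job_start_time if job_start_time is not None else end - 3 * 60 * 60
    return start, end


print(metrics_window())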
diff --git a/src/braket/jobs/quantum_job.py b/src/braket/jobs/quantum_job.py
index f022d28a3..a84118991 100644
--- a/src/braket/jobs/quantum_job.py
+++ b/src/braket/jobs/quantum_job.py
@@ -10,6 +10,8 @@
 # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
 # ANY KIND, either express or implied. See the License for the specific
 # language governing permissions and limitations under the License.
+from __future__ import annotations
+
 from abc import ABC, abstractmethod
 from typing import Any, Dict, List
@@ -170,7 +172,7 @@ def result(
     @abstractmethod
     def download_result(
         self,
-        extract_to: str = None,
+        extract_to: str | None = None,
         poll_timeout_seconds: float = DEFAULT_RESULTS_POLL_TIMEOUT,
         poll_interval_seconds: float = DEFAULT_RESULTS_POLL_INTERVAL,
     ) -> None:
@@ -179,7 +181,7 @@ def download_result(
        the results are extracted to the current directory.
        Args:
-            extract_to (str): The directory to which the results are extracted. The results
+            extract_to (str | None): The directory to which the results are extracted. The results
                are extracted to a folder titled with the hybrid job name within this directory.
                Default= `Current working directory`.
diff --git a/src/braket/jobs/quantum_job_creation.py b/src/braket/jobs/quantum_job_creation.py
index e34df0508..458a56b49 100644
--- a/src/braket/jobs/quantum_job_creation.py
+++ b/src/braket/jobs/quantum_job_creation.py
@@ -40,21 +40,21 @@ def prepare_quantum_job(
     device: str,
     source_module: str,
-    entry_point: str = None,
-    image_uri: str = None,
-    job_name: str = None,
-    code_location: str = None,
-    role_arn: str = None,
-    hyperparameters: dict[str, Any] = None,
-    input_data: str | dict | S3DataSourceConfig = None,
-    instance_config: InstanceConfig = None,
-    distribution: str = None,
-    stopping_condition: StoppingCondition = None,
-    output_data_config: OutputDataConfig = None,
-    copy_checkpoints_from_job: str = None,
-    checkpoint_config: CheckpointConfig = None,
-    aws_session: AwsSession = None,
-    tags: dict[str, str] = None,
+    entry_point: str | None = None,
+    image_uri: str | None = None,
+    job_name: str | None = None,
+    code_location: str | None = None,
+    role_arn: str | None = None,
+    hyperparameters: dict[str, Any] | None = None,
+    input_data: str | dict | S3DataSourceConfig | None = None,
+    instance_config: InstanceConfig | None = None,
+    distribution: str | None = None,
+    stopping_condition: StoppingCondition | None = None,
+    output_data_config: OutputDataConfig | None = None,
+    copy_checkpoints_from_job: str | None = None,
+    checkpoint_config: CheckpointConfig | None = None,
+    aws_session: AwsSession | None = None,
+    tags: dict[str, str] | None = None,
 ) -> dict:
     """Creates a hybrid job by invoking the Braket CreateJob API.
@@ -70,34 +70,34 @@ def prepare_quantum_job(
            tarred and uploaded. If `source_module` is an S3 URI, it must point to a tar.gz
            file. Otherwise, source_module may be a file or directory.
-        entry_point (str): A str that specifies the entry point of the hybrid job, relative to
-            the source module. The entry point must be in the format
+        entry_point (str | None): A str that specifies the entry point of the hybrid job, relative
+            to the source module. The entry point must be in the format
            `importable.module` or `importable.module:callable`. For example,
            `source_module.submodule:start_here` indicates the `start_here` function
            contained in `source_module.submodule`. If source_module is an S3 URI,
            entry point must be given. Default: source_module's name
-        image_uri (str): A str that specifies the ECR image to use for executing the hybrid job.
-            `image_uris.retrieve_image()` function may be used for retrieving the ECR image URIs
+        image_uri (str | None): A str that specifies the ECR image to use for executing the hybrid
+            job. `image_uris.retrieve_image()` function may be used for retrieving the ECR image URIs
            for the containers supported by Braket. Default = ``.
-        job_name (str): A str that specifies the name with which the hybrid job is created. The
-            hybrid job
-            name must be between 0 and 50 characters long and cannot contain underscores.
+        job_name (str | None): A str that specifies the name with which the hybrid job is created.
+            The hybrid job name must be between 0 and 50 characters long and cannot contain
+            underscores.
            Default: f'{image_uri_type}-{timestamp}'.
-        code_location (str): The S3 prefix URI where custom code will be uploaded.
+        code_location (str | None): The S3 prefix URI where custom code will be uploaded.
            Default: f's3://{default_bucket_name}/jobs/{job_name}/script'.
-        role_arn (str): A str providing the IAM role ARN used to execute the
+        role_arn (str | None): A str providing the IAM role ARN used to execute the
            script. Default: IAM role returned by AwsSession's `get_default_jobs_role()`.
-        hyperparameters (dict[str, Any]): Hyperparameters accessible to the hybrid job.
+        hyperparameters (dict[str, Any] | None): Hyperparameters accessible to the hybrid job.
            The hyperparameters are made accessible as a Dict[str, str] to the hybrid job.
            For convenience, this accepts other types for keys and values, but `str()`
            is called to convert them before being passed on. Default: None.
-        input_data (str | dict | S3DataSourceConfig): Information about the training
+        input_data (str | dict | S3DataSourceConfig | None): Information about the training
            data. Dictionary maps channel names to local paths or S3 URIs. Contents found
            at any local paths will be uploaded to S3 at
            f's3://{default_bucket_name}/jobs/{job_name}/data/{channel_name}. If a local
@@ -105,38 +105,39 @@ def prepare_quantum_job(
            channel name "input".
            Default: {}.
-        instance_config (InstanceConfig): Configuration of the instance(s) for running the
+        instance_config (InstanceConfig | None): Configuration of the instance(s) for running the
            classical code for the hybrid job. Defaults to
            `InstanceConfig(instanceType='ml.m5.large', instanceCount=1, volumeSizeInGB=30)`.
-        distribution (str): A str that specifies how the hybrid job should be distributed. If set to
-            "data_parallel", the hyperparameters for the hybrid job will be set to use data
-            parallelism features for PyTorch or TensorFlow. Default: None.
+        distribution (str | None): A str that specifies how the hybrid job should be distributed.
+            If set to "data_parallel", the hyperparameters for the hybrid job will be set to use
+            data parallelism features for PyTorch or TensorFlow. Default: None.
-        stopping_condition (StoppingCondition): The maximum length of time, in seconds,
+        stopping_condition (StoppingCondition | None): The maximum length of time, in seconds,
            and the maximum number of quantum tasks that a hybrid job can run before being
            forcefully stopped.
            Default: StoppingCondition(maxRuntimeInSeconds=5 * 24 * 60 * 60).
-        output_data_config (OutputDataConfig): Specifies the location for the output of the hybrid
-            job.
+        output_data_config (OutputDataConfig | None): Specifies the location for the output of the
+            hybrid job.
            Default: OutputDataConfig(s3Path=f's3://{default_bucket_name}/jobs/{job_name}/data',
            kmsKeyId=None).
-        copy_checkpoints_from_job (str): A str that specifies the hybrid job ARN whose checkpoint
-            you want to use in the current hybrid job. Specifying this value will copy over the
-            checkpoint data from `use_checkpoints_from_job`'s checkpoint_config s3Uri to the current
-            hybrid job's checkpoint_config s3Uri, making it available at checkpoint_config.localPath
-            during the hybrid job execution. Default: None
+        copy_checkpoints_from_job (str | None): A str that specifies the hybrid job ARN whose
+            checkpoint you want to use in the current hybrid job. Specifying this value will copy
+            over the checkpoint data from `use_checkpoints_from_job`'s checkpoint_config s3Uri to
+            the current hybrid job's checkpoint_config s3Uri, making it available at
+            checkpoint_config.localPath during the hybrid job execution. Default: None
-        checkpoint_config (CheckpointConfig): Configuration that specifies the location where
+        checkpoint_config (CheckpointConfig | None): Configuration that specifies the location where
            checkpoint data is stored.
            Default: CheckpointConfig(localPath='/opt/jobs/checkpoints',
            s3Uri=f's3://{default_bucket_name}/jobs/{job_name}/checkpoints').
-        aws_session (AwsSession): AwsSession for connecting to AWS Services.
+        aws_session (AwsSession | None): AwsSession for connecting to AWS Services.
            Default: AwsSession()
-        tags (dict[str, str]): Dict specifying the key-value pairs for tagging this hybrid job.
+        tags (dict[str, str] | None): Dict specifying the key-value pairs for tagging this
+            hybrid job.
            Default: {}.
    Returns:
@@ -232,13 +233,13 @@ def prepare_quantum_job(
     return create_job_kwargs
-def _generate_default_job_name(image_uri: str = None, func: Callable = None) -> str:
+def _generate_default_job_name(image_uri: str | None = None, func: Callable | None = None) -> str:
    """
    Generate default job name using the image uri and entrypoint function.
    Args:
-        image_uri (str): URI for the image container.
-        func (Callable): The entry point function.
+        image_uri (str | None): URI for the image container.
+        func (Callable | None): The entry point function.
    Returns:
        str: Hybrid job name.
diff --git a/src/braket/pulse/pulse_sequence.py b/src/braket/pulse/pulse_sequence.py
index e0808316b..bb783c69b 100644
--- a/src/braket/pulse/pulse_sequence.py
+++ b/src/braket/pulse/pulse_sequence.py
@@ -399,12 +399,12 @@ def _parse_from_calibration_schema(
                raise ValueError(f"The {instr['name']} instruction has not been implemented")
        return calibration_sequence
-    def __call__(self, arg: Any = None, **kwargs) -> PulseSequence:
+    def __call__(self, arg: Any | None = None, **kwargs) -> PulseSequence:
        """
        Implements the call function to easily make a bound PulseSequence.
        Args:
-            arg (Any): A value to bind to all parameters. Defaults to None and
+            arg (Any | None): A value to bind to all parameters. Defaults to None and
                can be overridden if the parameter is in kwargs.
        Returns:
diff --git a/src/braket/registers/qubit_set.py b/src/braket/registers/qubit_set.py
index 938d7fb50..0f9d0a7ac 100644
--- a/src/braket/registers/qubit_set.py
+++ b/src/braket/registers/qubit_set.py
@@ -40,10 +40,11 @@ class QubitSet(IndexedSet):
        mutating this object.
    """
-    def __init__(self, qubits: QubitSetInput = None):
+    def __init__(self, qubits: QubitSetInput | None = None):
        """
        Args:
-            qubits (QubitSetInput): Qubits to be included in the `QubitSet`. Default is `None`.
+            qubits (QubitSetInput | None): Qubits to be included in the `QubitSet`.
+                Default is `None`.
        Examples:
            >>> qubits = QubitSet([0, 1])
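Finally, a small usage sketch (not part of the diff) of the `QubitSet` constructor whose annotation changes above; it assumes `QubitSet` is importable from `braket.registers`, as the file path suggests:

from braket.registers import QubitSet

empty = QubitSet()        # qubits=None, now matching the QubitSetInput | None annotation
pair = QubitSet([0, 1])

print(len(empty), list(pair))  # expected output, roughly: 0 [Qubit(0), Qubit(1)]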