diff --git a/pynitrokey/cli/__init__.py b/pynitrokey/cli/__init__.py index d51a8ba0..1e56714b 100644 --- a/pynitrokey/cli/__init__.py +++ b/pynitrokey/cli/__init__.py @@ -72,7 +72,7 @@ def nitropy(): "ecdsa", "fido2", "pyusb", - "spsdk", + # "spsdk", ] for x in pymodules: logger.info(f"{x} version: {package_version(x)}") diff --git a/pynitrokey/cli/fido2.py b/pynitrokey/cli/fido2.py index a20578f4..00ecc00a 100644 --- a/pynitrokey/cli/fido2.py +++ b/pynitrokey/cli/fido2.py @@ -817,7 +817,7 @@ def version(serial: Optional[str], udp: bool) -> None: locked = "" # @todo: if len(res) > 3: - if res[3]: # type: ignore + if res[3]: locked = "locked" else: locked = "unlocked" diff --git a/pynitrokey/nk3/updates.py b/pynitrokey/nk3/updates.py index 36120a05..5d4d4f14 100644 --- a/pynitrokey/nk3/updates.py +++ b/pynitrokey/nk3/updates.py @@ -16,8 +16,6 @@ from io import BytesIO from typing import Any, Callable, Iterator, List, Optional -from spsdk.mboot.exceptions import McuBootConnectionError - import pynitrokey from pynitrokey.helpers import Retries from pynitrokey.nk3 import NK3_DATA @@ -35,6 +33,8 @@ from pynitrokey.trussed.utils import Version from pynitrokey.updates import Asset, Release +from ..trussed.bootloader.lpc55_upload.mboot.exceptions import McuBootConnectionError + logger = logging.getLogger(__name__) diff --git a/pynitrokey/trussed/bootloader/lpc55.py b/pynitrokey/trussed/bootloader/lpc55.py index f6aa493c..192c10c5 100644 --- a/pynitrokey/trussed/bootloader/lpc55.py +++ b/pynitrokey/trussed/bootloader/lpc55.py @@ -13,17 +13,17 @@ import sys from typing import List, Optional, Tuple, TypeVar -from spsdk.mboot.error_codes import StatusCode -from spsdk.mboot.interfaces.usb import MbootUSBInterface -from spsdk.mboot.mcuboot import McuBoot -from spsdk.mboot.properties import PropertyTag -from spsdk.sbfile.sb2.images import BootImageV21 -from spsdk.utils.interfaces.device.usb_device import UsbDevice -from spsdk.utils.usbfilter import USBDeviceFilter - from pynitrokey.trussed.utils import Uuid, Version from . 
import FirmwareMetadata, NitrokeyTrussedBootloader, ProgressCallback, Variant +from .lpc55_upload.mboot.error_codes import StatusCode +from .lpc55_upload.mboot.interfaces.usb import MbootUSBInterface +from .lpc55_upload.mboot.mcuboot import McuBoot +from .lpc55_upload.mboot.properties import PropertyTag +from .lpc55_upload.sbfile.misc import BcdVersion3 +from .lpc55_upload.sbfile.sb2.images import BootImageV21 +from .lpc55_upload.utils.interfaces.device.usb_device import UsbDevice +from .lpc55_upload.utils.usbfilter import USBDeviceFilter RKTH = bytes.fromhex("050aad3e77791a81e59c5b2ba5a158937e9460ee325d8ccba09734b8fdebb171") KEK = bytes([0xAA] * 32) @@ -58,8 +58,10 @@ def path(self) -> str: return self._path @property - def status(self) -> str: - return self.device.status_string + def status(self) -> Tuple[int, str]: + code = self.device.status_code + message = StatusCode.get_label(code) + return (code, message) def close(self) -> None: self.device.close() @@ -136,9 +138,13 @@ def open(cls: type[T], path: str) -> Optional[T]: return None +def parse_bcd_version(version: BcdVersion3) -> Version: + return Version(major=version.major, minor=version.minor, patch=version.service) + + def parse_firmware_image(data: bytes) -> FirmwareMetadata: image = BootImageV21.parse(data, kek=KEK) - version = Version.from_bcd_version(image.header.product_version) + version = parse_bcd_version(image.header.product_version) metadata = FirmwareMetadata(version=version) if image.cert_block: if image.cert_block.rkth == RKTH: diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/README.md b/pynitrokey/trussed/bootloader/lpc55_upload/README.md new file mode 100644 index 00000000..189b86ca --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/README.md @@ -0,0 +1,6 @@ +# LPC55 Bootloader Firmware Upload Module + +Everything in this directory was originally extracted from https://github.com/nxp-mcuxpresso/spsdk/tree/master. +It contains only the parts of SPSDK that are needed to upload a signed firmware image to a Nitrokey 3 xN with an LPC55 MCU.
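The status property now exposes both the numeric MCU boot status code and its human-readable label instead of a bare string. A minimal sketch of how calling code might consume the new tuple; the bootloader variable stands in for an already opened LPC55 bootloader object and the exact label text comes from the vendored StatusCode table, so both are illustrative only:

# Hypothetical: `bootloader` is an already opened LPC55 bootloader device.
code, message = bootloader.status
if code != 0:
    print(f"Bootloader reported status {code}: {message}")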
+ + diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/__init__.py new file mode 100644 index 00000000..e2c02c55 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/__init__.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + + +version = "2.1.0" + +import os + +__author__ = "NXP" +__license__ = "BSD-3-Clause" +__version__ = version +__release__ = "beta" + +# The SPSDK behavior settings +# SPSDK_DATA_FOLDER might be redefined by SPSDK_DATA_FOLDER_{version} +# or SPSDK_DATA_FOLDER env variable +SPSDK_DATA_FOLDER_ENV_VERSION = "SPSDK_DATA_FOLDER_" + version.replace(".", "_") +SPSDK_DATA_FOLDER = ( + os.environ.get(SPSDK_DATA_FOLDER_ENV_VERSION) + or os.environ.get("SPSDK_DATA_FOLDER") + or os.path.join(os.path.dirname(os.path.abspath(__file__)), "data") +) +SPSDK_DATA_FOLDER_COMMON = os.path.join(SPSDK_DATA_FOLDER, "common") +SPSDK_DATA_FOLDER_SCHEMAS = os.path.join(SPSDK_DATA_FOLDER, "jsonschemas") + +# SPSDK_CACHE_DISABLED might be redefined by SPSDK_CACHE_DISABLED_{version} env variable, default is False +SPSDK_ENV_CACHE_DISABLED = "SPSDK_CACHE_DISABLED_" + version.replace(".", "_") +SPSDK_CACHE_DISABLED = bool( + os.environ.get(SPSDK_ENV_CACHE_DISABLED) + or os.environ.get("SPSDK_CACHE_DISABLED") + or False +) + +SPSDK_YML_INDENT = 2 + + +ROOT_DIR = os.path.normpath(os.path.join(os.path.dirname(__file__), "..")) +SPSDK_EXAMPLES_FOLDER = os.path.relpath(os.path.join(ROOT_DIR, "examples")) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/apps/utils/utils.py b/pynitrokey/trussed/bootloader/lpc55_upload/apps/utils/utils.py new file mode 100644 index 00000000..e4db5c8b --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/apps/utils/utils.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2020-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +import os +from typing import Dict + +from ...utils.misc import get_abs_path, write_file + + +def filepath_from_config( + config: Dict, + key: str, + default_value: str, + base_dir: str, + output_folder: str = "", + file_extension: str = ".bin", +) -> str: + """Get file path from configuration dictionary and append .bin if the value is not blank. + + Function returns the output_folder + filename if the filename does not contain path. + In case filename contains path, return filename and append ".bin". + The empty string "" indicates that the user doesn't want the output. 
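The vendored __init__.py resolves its data folder from environment variables, letting a version-specific variable override the generic one before falling back to the bundled default. A small self-contained sketch of the same precedence logic (the path and version suffix are illustrative):

import os

# Version-specific variable wins, then the generic one, then the bundled default.
os.environ["SPSDK_DATA_FOLDER_2_1_0"] = "/tmp/spsdk-data"  # hypothetical path
resolved = (
    os.environ.get("SPSDK_DATA_FOLDER_2_1_0")
    or os.environ.get("SPSDK_DATA_FOLDER")
    or "<package>/data"  # placeholder for the bundled default
)
print(resolved)  # -> /tmp/spsdk-data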
+ :param config: Configuration dictionary + :param key: Name of the key + :param default_value: default value in case key value is not present + :param base_dir: base directory for path expansion + :param output_folder: Output folder, if blank file path from config will be used + :param file_extension: File extension that will be appended + :return: filename with appended ".bin" or blank filename "" + """ + filename = config.get(key, default_value) + if filename == "": + return filename + if not os.path.dirname(filename): + filename = os.path.join(output_folder, filename) + if not filename.endswith(file_extension): + filename += file_extension + return get_abs_path(filename, base_dir) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/__init__.py new file mode 100644 index 00000000..cd69e9a0 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/__init__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2020-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause +"""Module for crypto operations (certificate and key management).""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/certificate.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/certificate.py new file mode 100644 index 00000000..e58feb61 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/certificate.py @@ -0,0 +1,412 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2020-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause +"""Module for certificate management (generating certificate, validating certificate, chains).""" + +from datetime import datetime, timedelta +from typing import Dict, List, Optional, Union + +from cryptography import x509 +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec, rsa +from cryptography.x509.extensions import ExtensionNotFound +from typing_extensions import Self + +from ..crypto.hash import EnumHashAlgorithm +from ..crypto.keys import PrivateKey, PublicKey, PublicKeyEcc, PublicKeyRsa +from ..crypto.types import ( + SPSDKEncoding, + SPSDKExtensionOID, + SPSDKExtensions, + SPSDKName, + SPSDKNameOID, + SPSDKObjectIdentifier, + SPSDKVersion, +) +from ..exceptions import SPSDKError, SPSDKValueError +from ..utils.abstract import BaseClass +from ..utils.misc import align_block, load_binary, write_file + + +class SPSDKExtensionNotFoundError(SPSDKError, ExtensionNotFound): + """Extension not found error.""" + + +class Certificate(BaseClass): + """SPSDK Certificate representation.""" + + def __init__(self, certificate: x509.Certificate) -> None: + """Constructor of SPSDK Certificate. + + :param certificate: Cryptography Certificate representation. + """ + assert isinstance(certificate, x509.Certificate) + self.cert = certificate + + @staticmethod + def generate_certificate( + subject: x509.Name, + issuer: x509.Name, + subject_public_key: PublicKey, + issuer_private_key: PrivateKey, + serial_number: Optional[int] = None, + duration: Optional[int] = None, + extensions: Optional[List[x509.ExtensionType]] = None, + ) -> "Certificate": + """Generate certificate. 
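A short usage sketch for the filepath_from_config helper defined just above, assuming the remaining package files from this PR are importable; the final absolute path depends on get_abs_path, which resolves the result against base_dir:

from pynitrokey.trussed.bootloader.lpc55_upload.apps.utils.utils import filepath_from_config

config = {"outputImageFile": "image"}  # hypothetical key and value
path = filepath_from_config(
    config, "outputImageFile", "image.bin", base_dir="/work", output_folder="out"
)
# "image" has no directory part, so output_folder is prepended and ".bin" appended,
# yielding roughly "/work/out/image.bin" after resolution against base_dir.
print(path)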
+ + :param subject: subject name that the CA issues the certificate to + :param issuer: issuer name that issued the certificate + :param subject_public_key: Public key of subject + :param issuer_private_key: Private key of issuer + :param serial_number: certificate serial number, if not specified, random serial number will be set + :param duration: how long the certificate will be valid (in days) + :param extensions: List of extensions to include in the certificate + :return: certificate + """ + before = datetime.utcnow() if duration else datetime(2000, 1, 1) + after = ( + datetime.utcnow() + timedelta(days=duration) + if duration + else datetime(9999, 12, 31) + ) + crt = x509.CertificateBuilder( + subject_name=subject, + issuer_name=issuer, + not_valid_before=before, + not_valid_after=after, + public_key=subject_public_key.key, + # we don't pass extensions directly, need to handle the "critical" flag + extensions=[], + serial_number=serial_number or x509.random_serial_number(), + ) + + if extensions: + for ext in extensions: + crt = crt.add_extension(ext, critical=True) + + return Certificate(crt.sign(issuer_private_key.key, hashes.SHA256())) + + def save( + self, + file_path: str, + encoding_type: SPSDKEncoding = SPSDKEncoding.PEM, + ) -> None: + """Save the certificate/CSR into file. + + :param file_path: path to the file where item will be stored + :param encoding_type: encoding type (PEM or DER) + """ + write_file(self.export(encoding_type), file_path, mode="wb") + + @classmethod + def load(cls, file_path: str) -> Self: + """Load the Certificate from the given file. + + :param file_path: path to the file, where the key is stored + """ + data = load_binary(file_path) + return cls.parse(data=data) + + def export(self, encoding: SPSDKEncoding = SPSDKEncoding.NXP) -> bytes: + """Convert certificates into bytes. + + :param encoding: encoding type + :return: certificate in bytes form + """ + if encoding == SPSDKEncoding.NXP: + return align_block(self.export(SPSDKEncoding.DER), 4, "zeros") + + return self.cert.public_bytes( + SPSDKEncoding.get_cryptography_encodings(encoding) + ) + + def get_public_key(self) -> PublicKey: + """Get public keys from certificate. 
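A minimal self-contained sketch of generating a self-signed certificate with the vendored Certificate class; it assumes the full lpc55_upload package from this PR is importable and uses cryptography's NameOID directly to build the subject name:

from cryptography import x509
from cryptography.x509.oid import NameOID

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.certificate import Certificate
from pynitrokey.trussed.bootloader.lpc55_upload.crypto.keys import PrivateKeyEcc

key = PrivateKeyEcc.generate_key()
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "example")])
cert = Certificate.generate_certificate(
    subject=name,
    issuer=name,
    subject_public_key=key.get_public_key(),
    issuer_private_key=key,
    duration=365,
)
assert cert.self_signed  # the certificate verifies against its own public key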
+ + :return: RSA public key + """ + pub_key = self.cert.public_key() + if isinstance(pub_key, rsa.RSAPublicKey): + return PublicKeyRsa(pub_key) + if isinstance(pub_key, ec.EllipticCurvePublicKey): + return PublicKeyEcc(pub_key) + + raise SPSDKError(f"Unsupported Certificate public key: {type(pub_key)}") + + @property + def version(self) -> SPSDKVersion: + """Returns the certificate version.""" + return self.cert.version + + @property + def signature(self) -> bytes: + """Returns the signature bytes.""" + return self.cert.signature + + @property + def tbs_certificate_bytes(self) -> bytes: + """Returns the tbsCertificate payload bytes as defined in RFC 5280.""" + return self.cert.tbs_certificate_bytes + + @property + def signature_hash_algorithm( + self, + ) -> Optional[hashes.HashAlgorithm]: + """Returns a HashAlgorithm corresponding to the type of the digest signed in the certificate.""" + return self.cert.signature_hash_algorithm + + @property + def extensions(self) -> SPSDKExtensions: + """Returns an Extensions object.""" + return self.cert.extensions + + @property + def issuer(self) -> SPSDKName: + """Returns the issuer name object.""" + return self.cert.issuer + + @property + def serial_number(self) -> int: + """Returns certificate serial number.""" + return self.cert.serial_number + + @property + def subject(self) -> SPSDKName: + """Returns the subject name object.""" + return self.cert.subject + + @property + def signature_algorithm_oid(self) -> SPSDKObjectIdentifier: + """Returns the ObjectIdentifier of the signature algorithm.""" + return self.cert.signature_algorithm_oid + + def validate_subject(self, subject_certificate: "Certificate") -> bool: + """Validate certificate. + + :param subject_certificate: Subject's certificate + :raises SPSDKError: Unsupported key type in Certificate + :return: true/false whether certificate is valid or not + """ + assert subject_certificate.signature_hash_algorithm + return self.get_public_key().verify_signature( + subject_certificate.signature, + subject_certificate.tbs_certificate_bytes, + EnumHashAlgorithm.from_label( + subject_certificate.signature_hash_algorithm.name + ), + ) + + def validate(self, issuer_certificate: "Certificate") -> bool: + """Validate certificate. + + :param issuer_certificate: Issuer's certificate + :raises SPSDKError: Unsupported key type in Certificate + :return: true/false whether certificate is valid or not + """ + assert self.signature_hash_algorithm + return issuer_certificate.get_public_key().verify_signature( + self.signature, + self.tbs_certificate_bytes, + EnumHashAlgorithm.from_label(self.signature_hash_algorithm.name), + ) + + @property + def ca(self) -> bool: + """Check if CA flag is set in certificate. + + :return: true/false depending whether ca flag is set or not + """ + extension = self.extensions.get_extension_for_oid( + SPSDKExtensionOID.BASIC_CONSTRAINTS + ) + return extension.value.ca # type: ignore # mypy can not handle property definition in cryptography + + @property + def self_signed(self) -> bool: + """Indication whether the Certificate is self-signed.""" + return self.validate(self) + + @property + def raw_size(self) -> int: + """Raw size of the certificate.""" + return len(self.export()) + + def public_key_hash( + self, algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256 + ) -> bytes: + """Get key hash. 
+ + :param algorithm: Used hash algorithm, defaults to sha256 + :return: Key Hash + """ + return self.get_public_key().key_hash(algorithm) + + def __repr__(self) -> str: + """Text short representation about the Certificate.""" + return f"Certificate, SN:{hex(self.cert.serial_number)}" + + def __str__(self) -> str: + """Text information about the Certificate.""" + not_valid_before = self.cert.not_valid_before.strftime("%d.%m.%Y (%H:%M:%S)") + not_valid_after = self.cert.not_valid_after.strftime("%d.%m.%Y (%H:%M:%S)") + nfo = "" + nfo += f" Certification Authority: {'YES' if self.ca else 'NO'}\n" + nfo += f" Serial Number: {hex(self.cert.serial_number)}\n" + nfo += f" Validity Range: {not_valid_before} - {not_valid_after}\n" + if self.signature_hash_algorithm: + nfo += ( + f" Signature Algorithm: {self.signature_hash_algorithm.name}\n" + ) + nfo += f" Self Issued: {'YES' if self.self_signed else 'NO'}\n" + + return nfo + + @classmethod + def parse(cls, data: bytes) -> Self: + """Deserialize object from bytes array. + + :param data: Data to be parsed + :returns: Recreated certificate + """ + + def load_der_certificate(data: bytes) -> x509.Certificate: + """Load the DER certificate from bytes. + + This function is designed to eliminate cryptography exception + when the padded data is provided. + + :param data: Data with DER certificate + :return: Certificate (from cryptography library) + :raises SPSDKError: Unsupported certificate to load + """ + while True: + try: + return x509.load_der_x509_certificate(data) + except ValueError as exc: + if ( + len(exc.args) + and "kind: ExtraData" in exc.args[0] + and data[-1:] == b"\00" + ): + data = data[:-1] + else: + raise SPSDKValueError(str(exc)) from exc + + try: + cert = { + SPSDKEncoding.PEM: x509.load_pem_x509_certificate, + SPSDKEncoding.DER: load_der_certificate, + }[SPSDKEncoding.get_file_encodings(data)]( + data + ) # type: ignore + return Certificate(cert) # type: ignore + except ValueError as exc: + raise SPSDKError(f"Cannot load certificate: ({str(exc)})") from exc + + +def validate_certificate_chain(chain_list: List[Certificate]) -> List[bool]: + """Validate chain of certificates. + + :param chain_list: list of certificates in chain + :return: list of boolean values, which corresponds to the certificate validation in chain + :raises SPSDKError: When chain has less than two certificates + """ + if len(chain_list) <= 1: + raise SPSDKError("The chain must have at least two certificates") + result = [] + for i in range(len(chain_list) - 1): + result.append(chain_list[i].validate(chain_list[i + 1])) + return result + + +def validate_ca_flag_in_cert_chain(chain_list: List[Certificate]) -> bool: + """Validate CA flag in certification chain. + + :param chain_list: list of certificates in the chain + :return: true/false depending whether ca flag is set or not + """ + return chain_list[0].ca + + +X509NameConfig = Union[List[Dict[str, str]], Dict[str, Union[str, List[str]]]] + + +def generate_name(config: X509NameConfig) -> x509.Name: + """Generate x509 Name. 
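The generate_name helper that follows accepts two equivalent configuration shapes, a list of single-entry dicts or one flat dict. A small illustration, assuming SPSDKNameOID mirrors the attribute names of cryptography's NameOID (such as COMMON_NAME and COUNTRY_NAME):

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.certificate import generate_name

# Both shapes describe the same subject and yield equal x509.Name objects.
as_list = generate_name([{"COMMON_NAME": "example"}, {"COUNTRY_NAME": "DE"}])
as_dict = generate_name({"COMMON_NAME": "example", "COUNTRY_NAME": "DE"})
assert as_list == as_dict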
+ + :param config: subject/issuer description + :return: x509.Name + """ + attributes: List[x509.NameAttribute] = [] + + def _get_name_oid(name: str) -> x509.ObjectIdentifier: + try: + return getattr(SPSDKNameOID, name) + except Exception as exc: + raise SPSDKError(f"Invalid value of certificate attribute: {name}") from exc + + if isinstance(config, list): + for item in config: + for key, value in item.items(): + name_oid = _get_name_oid(key) + attributes.append(x509.NameAttribute(name_oid, str(value))) + + if isinstance(config, dict): + for key_second, value_second in config.items(): + name_oid = _get_name_oid(key_second) + if isinstance(value_second, list): + for value in value_second: + attributes.append(x509.NameAttribute(name_oid, str(value))) + else: + attributes.append(x509.NameAttribute(name_oid, str(value_second))) + + return x509.Name(attributes) + + +def generate_extensions(config: dict) -> List[x509.ExtensionType]: + """Get x509 extensions out of config data.""" + extensions: List[x509.ExtensionType] = [] + + for key, val in config.items(): + if key == "BASIC_CONSTRAINTS": + ca = bool(val["ca"]) + extensions.append( + x509.BasicConstraints( + ca=ca, path_length=val.get("path_length") if ca else None + ) + ) + if key == "WPC_QIAUTH_POLICY": + extensions.append(WPCQiAuthPolicy(value=val["value"])) + if key == "WPC_QIAUTH_RSID": + extensions.append(WPCQiAuthRSID(value=val["value"])) + return extensions + + +class WPCQiAuthPolicy(x509.UnrecognizedExtension): + """WPC Qi Auth Policy x509 extension.""" + + oid = x509.ObjectIdentifier("2.23.148.1.1") + + def __init__(self, value: int) -> None: + """Initialize the extension with given policy number.""" + super().__init__( + oid=self.oid, + value=b"\x04\x04" + value.to_bytes(length=4, byteorder="big"), + ) + + +class WPCQiAuthRSID(x509.UnrecognizedExtension): + """WPC Qi Auth RSID x509 extension.""" + + oid = x509.ObjectIdentifier("2.23.148.1.2") + + def __init__(self, value: str) -> None: + """Initialize the extension with given RSID in form of a hex-string.""" + super().__init__( + oid=self.oid, + value=b"\x04\x09" + bytes.fromhex(value).zfill(9), + ) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/exceptions.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/exceptions.py new file mode 100644 index 00000000..75a84d2d --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/exceptions.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Exceptions used in the Crypto module.""" + +from ..exceptions import SPSDKError + + +class SPSDKPCryptoError(SPSDKError): + """General SPSDK Crypto Error.""" + + +class SPSDKKeysNotMatchingError(SPSDKPCryptoError): + """Key pair not matching error.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/hash.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/hash.py new file mode 100644 index 00000000..546ab515 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/hash.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""OpenSSL implementation Hash algorithms.""" + +# Used security modules + +from math import ceil + +from cryptography.hazmat.primitives import hashes + +from ..exceptions import SPSDKError +from ..utils.misc import Endianness +from ..utils.spsdk_enum import SpsdkEnum + + +class EnumHashAlgorithm(SpsdkEnum): + """Hash algorithm enum.""" + + SHA1 
= (0, "sha1", "SHA1") + SHA256 = (1, "sha256", "SHA256") + SHA384 = (2, "sha384", "SHA384") + SHA512 = (3, "sha512", "SHA512") + MD5 = (4, "md5", "MD5") + SM3 = (5, "sm3", "SM3") + + +def get_hash_algorithm(algorithm: EnumHashAlgorithm) -> hashes.HashAlgorithm: + """For specified name return hashes algorithm instance. + + :param algorithm: Algorithm type enum + :return: instance of algorithm class + :raises SPSDKError: If algorithm not found + """ + algo_cls = getattr( + hashes, algorithm.label.upper(), None + ) # hack: get class object by name + if algo_cls is None: + raise SPSDKError(f"Unsupported algorithm: hashes.{algorithm.label.upper()}") + + return algo_cls() # pylint: disable=not-callable + + +def get_hash_length(algorithm: EnumHashAlgorithm) -> int: + """For specified name return hash binary length. + + :param algorithm: Algorithm type enum + :return: Hash length + :raises SPSDKError: If algorithm not found + """ + return get_hash_algorithm(algorithm).digest_size + + +class Hash: + """SPSDK Hash Class.""" + + def __init__(self, algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256) -> None: + """Initialize hash object. + + :param algorithm: Algorithm type enum, defaults to EnumHashAlgorithm.SHA256 + """ + self.hash_obj = hashes.Hash(get_hash_algorithm(algorithm)) + + def update(self, data: bytes) -> None: + """Update the hash by new data. + + :param data: Data to be hashed + """ + self.hash_obj.update(data) + + def update_int(self, value: int) -> None: + """Update the hash by new integer value as is. + + :param value: Integer value to be hashed + """ + data = value.to_bytes( + length=ceil(value.bit_length() / 8), byteorder=Endianness.BIG.value + ) + self.update(data) + + def finalize(self) -> bytes: + """Finalize the hash and return the hash value. + + :returns: Computed hash + """ + return self.hash_obj.finalize() + + +def get_hash( + data: bytes, algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256 +) -> bytes: + """Return a HASH from input data with specified algorithm. + + :param data: Input data in bytes + :param algorithm: Algorithm type enum + :return: Hash-ed bytes + :raises SPSDKError: If algorithm not found + """ + hash_obj = hashes.Hash(get_hash_algorithm(algorithm)) + hash_obj.update(data) + return hash_obj.finalize() diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/hmac.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/hmac.py new file mode 100644 index 00000000..338b7ee0 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/hmac.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""OpenSSL implementation for HMAC packet authentication.""" + +from cryptography.exceptions import InvalidSignature + +# Used security modules +from cryptography.hazmat.primitives import hmac as hmac_cls + +from .hash import EnumHashAlgorithm, get_hash_algorithm + + +def hmac( + key: bytes, data: bytes, algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256 +) -> bytes: + """Return a HMAC from data with specified key and algorithm. + + :param key: The key in bytes format + :param data: Input data in bytes format + :param algorithm: Algorithm type for HASH function (sha256, sha384, sha512, ...) 
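A self-contained example of the two hashing entry points above, the incremental Hash class and the one-shot get_hash helper (import paths are the vendored locations from this PR):

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.hash import (
    EnumHashAlgorithm,
    Hash,
    get_hash,
)

hasher = Hash(EnumHashAlgorithm.SHA256)
hasher.update(b"fir")
hasher.update(b"mware")
assert hasher.finalize() == get_hash(b"firmware", EnumHashAlgorithm.SHA256)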
+ :return: HMAC bytes + """ + hmac_obj = hmac_cls.HMAC(key, get_hash_algorithm(algorithm)) + hmac_obj.update(data) + return hmac_obj.finalize() + + +def hmac_validate( + key: bytes, + data: bytes, + signature: bytes, + algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256, +) -> bool: + """Return a HMAC from data with specified key and algorithm. + + :param key: The key in bytes format + :param data: Input data in bytes format + :param signature: HMAC signature to validate + :param algorithm: Algorithm type for HASH function (sha256, sha384, sha512, ...) + :return: HMAC bytes + """ + hmac_obj = hmac_cls.HMAC(key=key, algorithm=get_hash_algorithm(algorithm)) + hmac_obj.update(data) + try: + hmac_obj.verify(signature=signature) + return True + except InvalidSignature: + return False diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/keys.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/keys.py new file mode 100644 index 00000000..aa40bfc0 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/keys.py @@ -0,0 +1,1226 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2020-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause +"""Module for key generation and saving keys to file.""" + +import abc +import getpass +import math +from enum import Enum +from typing import Any, Callable, Dict, Optional, Tuple, Union + +from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm +from cryptography.hazmat.primitives.asymmetric import ec, padding, rsa, utils +from cryptography.hazmat.primitives.serialization import ( + BestAvailableEncryption, + NoEncryption, + PrivateFormat, + PublicFormat, +) +from cryptography.hazmat.primitives.serialization import ( + load_der_private_key as crypto_load_der_private_key, +) +from cryptography.hazmat.primitives.serialization import ( + load_der_public_key as crypto_load_der_public_key, +) +from cryptography.hazmat.primitives.serialization import ( + load_pem_private_key as crypto_load_pem_private_key, +) +from cryptography.hazmat.primitives.serialization import ( + load_pem_public_key as crypto_load_pem_public_key, +) +from typing_extensions import Self + +from ..exceptions import SPSDKError, SPSDKNotImplementedError, SPSDKValueError +from ..utils.abstract import BaseClass +from ..utils.misc import Endianness, load_binary, write_file +from .hash import EnumHashAlgorithm, get_hash, get_hash_algorithm +from .rng import rand_below, random_hex +from .types import SPSDKEncoding + + +def _load_pem_private_key(data: bytes, password: Optional[bytes]) -> Any: + """Load PEM Private key. + + :param data: key data + :param password: optional password + :raises SPSDKError: if the key cannot be decoded + :return: Key + """ + last_error: Exception + try: + return _crypto_load_private_key(SPSDKEncoding.PEM, data, password) + except (UnsupportedAlgorithm, ValueError) as exc: + last_error = exc + raise SPSDKError(f"Cannot load PEM private key: {last_error}") + + +def _load_der_private_key(data: bytes, password: Optional[bytes]) -> Any: + """Load DER Private key. 
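A self-contained example for the hmac/hmac_validate pair from the vendored crypto.hmac module; note that hmac_validate returns a boolean, not bytes:

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.hmac import hmac, hmac_validate

key = b"\x01" * 32
tag = hmac(key, b"payload")  # HMAC-SHA256 by default, 32 bytes
assert hmac_validate(key, b"payload", tag)
assert not hmac_validate(key, b"tampered", tag)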
+ + :param data: key data + :param password: optional password + :raises SPSDKError: if the key cannot be decoded + :return: Key + """ + last_error: Exception + try: + return _crypto_load_private_key(SPSDKEncoding.DER, data, password) + except (UnsupportedAlgorithm, ValueError) as exc: + last_error = exc + raise SPSDKError(f"Cannot load DER private key: {last_error}") + + +def _crypto_load_private_key( + encoding: SPSDKEncoding, data: bytes, password: Optional[bytes] +) -> Union[ec.EllipticCurvePrivateKey, rsa.RSAPrivateKey]: + """Load Private key. + + :param encoding: Encoding of input data + :param data: Key data + :param password: Optional password + :raises SPSDKValueError: Unsupported encoding + :raises SPSDKWrongKeyPassphrase: Private key is encrypted and passphrase is incorrect + :raises SPSDKKeyPassphraseMissing: Private key is encrypted and passphrase is missing + :return: Key + """ + if encoding not in [SPSDKEncoding.DER, SPSDKEncoding.PEM]: + raise SPSDKValueError(f"Unsupported encoding: {encoding}") + crypto_load_function = { + SPSDKEncoding.DER: crypto_load_der_private_key, + SPSDKEncoding.PEM: crypto_load_pem_private_key, + }[encoding] + try: + private_key = crypto_load_function(data, password) + assert isinstance(private_key, (ec.EllipticCurvePrivateKey, rsa.RSAPrivateKey)) + return private_key + except ValueError as exc: + if "Incorrect password" in exc.args[0]: + raise SPSDKWrongKeyPassphrase("Provided password was incorrect.") from exc + raise exc + except TypeError as exc: + if "Password was not given but private key is encrypted" in str(exc): + raise SPSDKKeyPassphraseMissing(str(exc)) from exc + raise exc + + +def _load_pem_public_key(data: bytes) -> Any: + """Load PEM Public key. + + :param data: key data + :raises SPSDKError: if the key cannot be decoded + :return: PublicKey + """ + last_error: Exception + try: + return crypto_load_pem_public_key(data) + except (UnsupportedAlgorithm, ValueError) as exc: + last_error = exc + raise SPSDKError(f"Cannot load PEM public key: {last_error}") + + +def _load_der_public_key(data: bytes) -> Any: + """Load DER Public key. + + :param data: key data + :raises SPSDKError: if the key cannot be decoded + :return: PublicKey + """ + last_error: Exception + try: + return crypto_load_der_public_key(data) + except (UnsupportedAlgorithm, ValueError) as exc: + last_error = exc + raise SPSDKError(f"Cannot load DER private key: {last_error}") + + +class SPSDKInvalidKeyType(SPSDKError): + """Invalid Key Type.""" + + +class SPSDKKeyPassphraseMissing(SPSDKError): + """Passphrase for decryption of private key is missing.""" + + +class SPSDKWrongKeyPassphrase(SPSDKError): + """Passphrase for decryption of private key is wrong.""" + + +class PrivateKey(BaseClass, abc.ABC): + """SPSDK Private Key.""" + + key: Any + + @classmethod + @abc.abstractmethod + def generate_key(cls) -> Self: + """Generate SPSDK Key (private key). + + :return: SPSDK private key + """ + + @property + @abc.abstractmethod + def signature_size(self) -> int: + """Size of signature data.""" + + @property + @abc.abstractmethod + def key_size(self) -> int: + """Key size in bits. + + :return: Key Size + """ + + @abc.abstractmethod + def get_public_key(self) -> "PublicKey": + """Generate public key. + + :return: Public key + """ + + @abc.abstractmethod + def verify_public_key(self, public_key: "PublicKey") -> bool: + """Verify public key. 
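A brief self-contained illustration of the key-pair check that verify_public_key performs; the concrete implementations further below simply compare the derived public key against the candidate:

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.keys import PrivateKeyEcc

key = PrivateKeyEcc.generate_key()
other = PrivateKeyEcc.generate_key()
assert key.verify_public_key(key.get_public_key())
assert not key.verify_public_key(other.get_public_key())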
+ + :param public_key: Public key to verify + :return: True if is in pair, False otherwise + """ + + def __eq__(self, obj: Any) -> bool: + """Check object equality.""" + return ( + isinstance(obj, self.__class__) + and self.get_public_key() == obj.get_public_key() + ) + + def save( + self, + file_path: str, + password: Optional[str] = None, + encoding: SPSDKEncoding = SPSDKEncoding.PEM, + ) -> None: + """Save the Private key to the given file. + + :param file_path: path to the file, where the key will be stored + :param password: password to private key; None to store without password + :param encoding: encoding type, default is PEM + """ + write_file( + self.export(password=password, encoding=encoding), file_path, mode="wb" + ) + + @classmethod + def load(cls, file_path: str, password: Optional[str] = None) -> Self: + """Load the Private key from the given file. + + :param file_path: path to the file, where the key is stored + :param password: password to private key; None to load without password + """ + data = load_binary(file_path) + return cls.parse(data=data, password=password) + + @abc.abstractmethod + def sign(self, data: bytes) -> bytes: + """Sign input data. + + :param data: Input data + :return: Signed data + """ + + @abc.abstractmethod + def export( + self, + password: Optional[str] = None, + encoding: SPSDKEncoding = SPSDKEncoding.DER, + ) -> bytes: + """Export key into bytes in requested format. + + :param password: password to private key; None to store without password + :param encoding: encoding type, default is DER + :return: Byte representation of key + """ + + @classmethod + def parse(cls, data: bytes, password: Optional[str] = None) -> Self: + """Deserialize object from bytes array. + + :param data: Data to be parsed + :param password: password to private key; None to store without password + :returns: Recreated key + """ + try: + private_key = { + SPSDKEncoding.PEM: _load_pem_private_key, + SPSDKEncoding.DER: _load_der_private_key, + }[SPSDKEncoding.get_file_encodings(data)]( + data, password.encode("utf-8") if password else None + ) + if isinstance(private_key, (ec.EllipticCurvePrivateKey, rsa.RSAPrivateKey)): + return cls.create(private_key) + except (ValueError, SPSDKInvalidKeyType) as exc: + raise SPSDKError(f"Cannot load private key: ({str(exc)})") from exc + raise SPSDKError(f"Unsupported private key: ({str(private_key)})") + + @classmethod + def create(cls, key: Any) -> Self: + """Create Private Key object. + + :param key: Supported private key. + :raises SPSDKInvalidKeyType: Unsupported private key given + :return: SPSDK Private Kye object + """ + SUPPORTED_KEYS = { + PrivateKeyEcc: ec.EllipticCurvePrivateKey, + PrivateKeyRsa: rsa.RSAPrivateKey, + } + for k, v in SUPPORTED_KEYS.items(): + if isinstance(key, v): + return k(key) + + raise SPSDKInvalidKeyType(f"Unsupported key type: {str(key)}") + + +class PublicKey(BaseClass, abc.ABC): + """SPSDK Public Key.""" + + key: Any + + @property + @abc.abstractmethod + def signature_size(self) -> int: + """Size of signature data.""" + + @property + @abc.abstractmethod + def public_numbers(self) -> Any: + """Public numbers.""" + + def save(self, file_path: str, encoding: SPSDKEncoding = SPSDKEncoding.PEM) -> None: + """Save the public key to the file. 
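A self-contained round trip through save and load with a passphrase-protected private key, using a temporary directory (the file name is illustrative); a wrong or missing passphrase surfaces as the SPSDKWrongKeyPassphrase and SPSDKKeyPassphraseMissing errors defined above:

import os
import tempfile

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.keys import PrivateKeyEcc

key = PrivateKeyEcc.generate_key()
with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "key.pem")
    key.save(path, password="secret")  # PEM, encrypted with the passphrase
    reloaded = PrivateKeyEcc.load(path, password="secret")
assert reloaded == key  # equality compares the derived public keys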
+ + :param file_path: path to the file, where the key will be stored + :param encoding: encoding type, default is PEM + """ + write_file(data=self.export(encoding=encoding), path=file_path, mode="wb") + + @classmethod + def load(cls, file_path: str) -> Self: + """Load the Public key from the given file. + + :param file_path: path to the file, where the key is stored + """ + data = load_binary(file_path) + return cls.parse(data=data) + + @abc.abstractmethod + def verify_signature( + self, + signature: bytes, + data: bytes, + algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256, + ) -> bool: + """Verify input data. + + :param signature: The signature of input data + :param data: Input data + :param algorithm: Used algorithm + :return: True if signature is valid, False otherwise + """ + + @abc.abstractmethod + def export(self, encoding: SPSDKEncoding = SPSDKEncoding.NXP) -> bytes: + """Export key into bytes to requested format. + + :param encoding: encoding type, default is NXP + :return: Byte representation of key + """ + + @classmethod + def parse(cls, data: bytes) -> Self: + """Deserialize object from bytes array. + + :param data: Data to be parsed + :returns: Recreated key + """ + try: + public_key = { + SPSDKEncoding.PEM: _load_pem_public_key, + SPSDKEncoding.DER: _load_der_public_key, + }[SPSDKEncoding.get_file_encodings(data)](data) + if isinstance(public_key, (ec.EllipticCurvePublicKey, rsa.RSAPublicKey)): + return cls.create(public_key) + except (ValueError, SPSDKInvalidKeyType) as exc: + raise SPSDKError(f"Cannot load public key: ({str(exc)})") from exc + raise SPSDKError(f"Unsupported public key: ({str(public_key)})") + + def key_hash( + self, algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256 + ) -> bytes: + """Get key hash. + + :param algorithm: Used hash algorithm, defaults to sha256 + :return: Key Hash + """ + return get_hash(self.export(), algorithm) + + def __eq__(self, obj: Any) -> bool: + """Check object equality.""" + return ( + isinstance(obj, self.__class__) + and self.public_numbers == obj.public_numbers + ) + + @classmethod + def create(cls, key: Any) -> Self: + """Create Public Key object. + + :param key: Supported public key. + :raises SPSDKInvalidKeyType: Unsupported public key given + :return: SPSDK Public Kye object + """ + SUPPORTED_KEYS = { + PublicKeyEcc: ec.EllipticCurvePublicKey, + PublicKeyRsa: rsa.RSAPublicKey, + } + for k, v in SUPPORTED_KEYS.items(): + if isinstance(key, v): + return k(key) + + raise SPSDKInvalidKeyType(f"Unsupported key type: {str(key)}") + + +# =================================================================================================== +# =================================================================================================== +# +# RSA Keys +# +# =================================================================================================== +# =================================================================================================== + + +class PrivateKeyRsa(PrivateKey): + """SPSDK Private Key.""" + + SUPPORTED_KEY_SIZES = [2048, 3072, 4096] + + key: rsa.RSAPrivateKey + + def __init__(self, key: rsa.RSAPrivateKey) -> None: + """Create SPSDK Key. + + :param key: Only RSA key is accepted + """ + self.key = key + + @classmethod + def generate_key(cls, key_size: int = 2048, exponent: int = 65537) -> Self: + """Generate SPSDK Key (private key). 
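key_hash on PublicKey (defined above) hashes the key's NXP export, which for ECC keys is the raw X || Y coordinate pair; a short self-contained check:

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.hash import EnumHashAlgorithm, get_hash
from pynitrokey.trussed.bootloader.lpc55_upload.crypto.keys import PrivateKeyEcc

pub = PrivateKeyEcc.generate_key().get_public_key()
digest = pub.key_hash(EnumHashAlgorithm.SHA256)
assert digest == get_hash(pub.export(), EnumHashAlgorithm.SHA256)
assert len(digest) == 32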
+ + :param key_size: key size in bits; must be >= 512 + :param exponent: public exponent; must be >= 3 and odd + :return: SPSDK private key + """ + return cls( + rsa.generate_private_key( + public_exponent=exponent, + key_size=key_size, + ) + ) + + @property + def signature_size(self) -> int: + """Size of signature data.""" + return self.key.key_size // 8 + + @property + def key_size(self) -> int: + """Key size in bits. + + :return: Key Size + """ + return self.key.key_size + + def get_public_key(self) -> "PublicKeyRsa": + """Generate public key. + + :return: Public key + """ + return PublicKeyRsa(self.key.public_key()) + + def verify_public_key(self, public_key: PublicKey) -> bool: + """Verify public key. + + :param public_key: Public key to verify + :return: True if is in pair, False otherwise + """ + return self.get_public_key() == public_key + + def export( + self, + password: Optional[str] = None, + encoding: SPSDKEncoding = SPSDKEncoding.DER, + ) -> bytes: + """Export the Private key to the bytes in requested encoding. + + :param password: password to private key; None to store without password + :param encoding: encoding type, default is DER + :returns: Private key in bytes + """ + enc = ( + BestAvailableEncryption(password=password.encode("utf-8")) + if password + else NoEncryption() + ) + return self.key.private_bytes( + SPSDKEncoding.get_cryptography_encodings(encoding), PrivateFormat.PKCS8, enc + ) + + def sign( + self, data: bytes, algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256 + ) -> bytes: + """Sign input data. + + :param data: Input data + :param algorithm: Used algorithm + :return: Signed data + """ + signature = self.key.sign( + data=data, + padding=padding.PKCS1v15(), + algorithm=get_hash_algorithm(algorithm), + ) + return signature + + @classmethod + def parse(cls, data: bytes, password: Optional[str] = None) -> Self: + """Deserialize object from bytes array. + + :param data: Data to be parsed + :param password: password to private key; None to store without password + :returns: Recreated key + """ + key = super().parse(data=data, password=password) + if isinstance(key, PrivateKeyRsa): + return key + + raise SPSDKInvalidKeyType("Can't parse Rsa private key from given data") + + def __repr__(self) -> str: + return f"RSA{self.key_size} Private Key" + + def __str__(self) -> str: + """Object description in string format.""" + ret = ( + f"RSA{self.key_size} Private key: \nd({hex(self.key.private_numbers().d)})" + ) + return ret + + +class PublicKeyRsa(PublicKey): + """SPSDK Public Key.""" + + key: rsa.RSAPublicKey + + def __init__(self, key: rsa.RSAPublicKey) -> None: + """Create SPSDK Public Key. + + :param key: SPSDK Public Key data or file path + """ + self.key = key + + @property + def signature_size(self) -> int: + """Size of signature data.""" + return self.key.key_size // 8 + + @property + def key_size(self) -> int: + """Key size in bits. + + :return: Key Size + """ + return self.key.key_size + + @property + def public_numbers(self) -> rsa.RSAPublicNumbers: + """Public numbers of key. + + :return: Public numbers + """ + return self.key.public_numbers() + + @property + def e(self) -> int: + """Public number E. + + :return: E + """ + return self.public_numbers.e + + @property + def n(self) -> int: + """Public number N. 
+ + :return: N + """ + return self.public_numbers.n + + def export( + self, + encoding: SPSDKEncoding = SPSDKEncoding.NXP, + exp_length: Optional[int] = None, + modulus_length: Optional[int] = None, + ) -> bytes: + """Save the public key to the bytes in NXP or DER format. + + :param encoding: encoding type, default is NXP + :param exp_length: Optional specific exponent length in bytes + :param modulus_length: Optional specific modulus length in bytes + :returns: Public key in bytes + """ + if encoding == SPSDKEncoding.NXP: + exp_rotk = self.e + mod_rotk = self.n + exp_length = exp_length or math.ceil(exp_rotk.bit_length() / 8) + modulus_length = modulus_length or math.ceil(mod_rotk.bit_length() / 8) + exp_rotk_bytes = exp_rotk.to_bytes(exp_length, Endianness.BIG.value) + mod_rotk_bytes = mod_rotk.to_bytes(modulus_length, Endianness.BIG.value) + return mod_rotk_bytes + exp_rotk_bytes + + return self.key.public_bytes( + SPSDKEncoding.get_cryptography_encodings(encoding), PublicFormat.PKCS1 + ) + + def verify_signature( + self, + signature: bytes, + data: bytes, + algorithm: EnumHashAlgorithm = EnumHashAlgorithm.SHA256, + ) -> bool: + """Verify input data. + + :param signature: The signature of input data + :param data: Input data + :param algorithm: Used algorithm + :return: True if signature is valid, False otherwise + """ + try: + self.key.verify( + signature=signature, + data=data, + padding=padding.PKCS1v15(), + algorithm=get_hash_algorithm(algorithm), + ) + except InvalidSignature: + return False + + return True + + def __eq__(self, obj: Any) -> bool: + """Check object equality.""" + return ( + isinstance(obj, self.__class__) + and self.public_numbers == obj.public_numbers + ) + + def __repr__(self) -> str: + return f"RSA{self.key_size} Public Key" + + def __str__(self) -> str: + """Object description in string format.""" + ret = f"RSA{self.key_size} Public key: \ne({hex(self.e)}) \nn({hex(self.n)})" + return ret + + @classmethod + def recreate(cls, exponent: int, modulus: int) -> Self: + """Recreate RSA public key from Exponent and modulus. + + :param exponent: Exponent of RSA key. + :param modulus: Modulus of RSA key. + :return: RSA public key. + """ + public_numbers = rsa.RSAPublicNumbers(e=exponent, n=modulus) + return cls(public_numbers.public_key()) + + @staticmethod + def recreate_public_numbers(data: bytes) -> rsa.RSAPublicNumbers: + """Recreate public numbers from data. + + :param data: Dat with raw key. + :raises SPSDKError: Un recognized data. + :return: RAS public numbers. + """ + data_len = len(data) + for key_size in PrivateKeyRsa.SUPPORTED_KEY_SIZES: + key_size_bytes = key_size // 8 + if key_size_bytes + 3 <= data_len <= key_size_bytes + 4: + n = int.from_bytes(data[:key_size_bytes], Endianness.BIG.value) + e = int.from_bytes(data[key_size_bytes:], Endianness.BIG.value) + return rsa.RSAPublicNumbers(e=e, n=n) + + raise SPSDKError(f"Unsupported RSA key to recreate with data size {data_len}") + + @classmethod + def parse(cls, data: bytes) -> Self: + """Deserialize object from bytes array. 
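A self-contained RSA example tying the pieces above together: sign uses PKCS#1 v1.5 with SHA-256 by default, and the NXP public-key export is the big-endian modulus followed by the exponent, which recreate_public_numbers can turn back into key material:

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.keys import PrivateKeyRsa, PublicKeyRsa

priv = PrivateKeyRsa.generate_key(key_size=2048)
pub = priv.get_public_key()

signature = priv.sign(b"message")
assert pub.verify_signature(signature, b"message")

raw = pub.export()  # NXP encoding: modulus || exponent
nums = PublicKeyRsa.recreate_public_numbers(raw)
assert (nums.n, nums.e) == (pub.n, pub.e)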
+ + :param data: Data to be parsed + :returns: Recreated key + """ + try: + key = super().parse(data=data) + if isinstance(key, PublicKeyRsa): + return key + except SPSDKError: + public_numbers = PublicKeyRsa.recreate_public_numbers(data) + return PublicKeyRsa(public_numbers.public_key()) # type:ignore + + raise SPSDKInvalidKeyType("Can't parse RSA public key from given data") + + +# =================================================================================================== +# =================================================================================================== +# +# Elliptic Curves Keys +# +# =================================================================================================== +# =================================================================================================== + + +class EccCurve(str, Enum): + """Supported ecc key types.""" + + SECP256R1 = "secp256r1" + SECP384R1 = "secp384r1" + SECP521R1 = "secp521r1" + + +class SPSDKUnsupportedEccCurve(SPSDKValueError): + """Unsupported Ecc curve error.""" + + +class KeyEccCommon: + """SPSDK Common Key.""" + + key: Union[ec.EllipticCurvePrivateKey, ec.EllipticCurvePublicKey] + + @property + def coordinate_size(self) -> int: + """Size of signature data.""" + return math.ceil(self.key.key_size / 8) + + @property + def signature_size(self) -> int: + """Size of signature data.""" + return self.coordinate_size * 2 + + @property + def curve(self) -> EccCurve: + """Curve type.""" + return EccCurve(self.key.curve.name) + + @property + def key_size(self) -> int: + """Key size in bits.""" + return self.key.key_size + + @staticmethod + def _get_ec_curve_object(name: EccCurve) -> ec.EllipticCurve: + """Get the EC curve object by its name. + + :param name: Name of EC curve. + :return: EC curve object. + :raises SPSDKValueError: Invalid EC curve name. + """ + # pylint: disable=protected-access + for key_object in ec._CURVE_TYPES: + if key_object.lower() == name.lower(): + # pylint: disable=protected-access + return ec._CURVE_TYPES[key_object] + + raise SPSDKValueError(f"The EC curve with name '{name}' is not supported.") + + @staticmethod + def serialize_signature(signature: bytes, coordinate_length: int) -> bytes: + """Re-format ECC ANS.1 DER signature into the format used by ROM code.""" + r, s = utils.decode_dss_signature(signature) + + r_bytes = r.to_bytes(coordinate_length, Endianness.BIG.value) + s_bytes = s.to_bytes(coordinate_length, Endianness.BIG.value) + return r_bytes + s_bytes + + +class PrivateKeyEcc(KeyEccCommon, PrivateKey): + """SPSDK Private Key.""" + + key: ec.EllipticCurvePrivateKey + + def __init__(self, key: ec.EllipticCurvePrivateKey) -> None: + """Create SPSDK Ecc Private Key. + + :param key: Only Ecc key is accepted + """ + self.key = key + + @classmethod + def generate_key(cls, curve_name: EccCurve = EccCurve.SECP256R1) -> Self: + """Generate SPSDK Key (private key). + + :param curve_name: Name of curve + :return: SPSDK private key + """ + curve_obj = cls._get_ec_curve_object(curve_name) + prv = ec.generate_private_key(curve_obj) + return cls(prv) + + def exchange(self, peer_public_key: "PublicKeyEcc") -> bytes: + """Exchange key using ECDH algorithm with provided peer public key. + + :param peer_public_key: Peer public key + :return: Shared key + """ + return self.key.exchange( + algorithm=ec.ECDH(), peer_public_key=peer_public_key.key + ) + + def get_public_key(self) -> "PublicKeyEcc": + """Generate public key. 
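The exchange method above wraps ECDH; a self-contained sketch showing that two key pairs on the same curve derive the same shared secret:

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.keys import EccCurve, PrivateKeyEcc

alice = PrivateKeyEcc.generate_key(EccCurve.SECP256R1)
bob = PrivateKeyEcc.generate_key(EccCurve.SECP256R1)
assert alice.exchange(bob.get_public_key()) == bob.exchange(alice.get_public_key())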
+ + :return: Public key + """ + return PublicKeyEcc(self.key.public_key()) + + def verify_public_key(self, public_key: PublicKey) -> bool: + """Verify public key. + + :param public_key: Public key to verify + :return: True if is in pair, False otherwise + """ + return self.get_public_key() == public_key + + def export( + self, + password: Optional[str] = None, + encoding: SPSDKEncoding = SPSDKEncoding.DER, + ) -> bytes: + """Export the Private key to the bytes in requested format. + + :param password: password to private key; None to store without password + :param encoding: encoding type, default is DER + :returns: Private key in bytes + """ + return self.key.private_bytes( + encoding=SPSDKEncoding.get_cryptography_encodings(encoding), + format=PrivateFormat.PKCS8, + encryption_algorithm=BestAvailableEncryption(password.encode("utf-8")) + if password + else NoEncryption(), + ) + + def sign( + self, + data: bytes, + algorithm: Optional[EnumHashAlgorithm] = None, + der_format: bool = False, + prehashed: bool = False, + ) -> bytes: + """Sign input data. + + :param data: Input data + :param algorithm: Used algorithm + :param der_format: Use DER format as a output + :param prehashed: Use pre hashed value as input + :return: Signed data + """ + hash_name = ( + algorithm + or { + 256: EnumHashAlgorithm.SHA256, + 384: EnumHashAlgorithm.SHA384, + 521: EnumHashAlgorithm.SHA512, + }[self.key.key_size] + ) + if prehashed: + signature_algorithm = ec.ECDSA( + utils.Prehashed(get_hash_algorithm(hash_name)) + ) + else: + signature_algorithm = ec.ECDSA(get_hash_algorithm(hash_name)) + signature = self.key.sign(data, signature_algorithm) + + if der_format: + return signature + + return self.serialize_signature(signature, self.coordinate_size) + + @property + def d(self) -> int: + """Private number D.""" + return self.key.private_numbers().private_value + + @classmethod + def parse(cls, data: bytes, password: Optional[str] = None) -> Self: + """Deserialize object from bytes array. + + :param data: Data to be parsed + :param password: password to private key; None to store without password + :returns: Recreated key + """ + key = super().parse(data=data, password=password) + if isinstance(key, PrivateKeyEcc): + return key + + raise SPSDKInvalidKeyType("Can't parse Ecc private key from given data") + + @classmethod + def recreate(cls, d: int, curve: EccCurve) -> Self: + """Recreate ECC private key from private key number. + + :param d: Private number D. + :param curve: ECC curve. + + :return: ECC private key. + """ + key = ec.derive_private_key(d, cls._get_ec_curve_object(curve)) + return cls(key) + + def __repr__(self) -> str: + return f"ECC {self.curve} Private Key" + + def __str__(self) -> str: + """Object description in string format.""" + return f"ECC ({self.curve}) Private key: \nd({hex(self.d)})" + + +class PublicKeyEcc(KeyEccCommon, PublicKey): + """SPSDK Public Key.""" + + key: ec.EllipticCurvePublicKey + + def __init__(self, key: ec.EllipticCurvePublicKey) -> None: + """Create SPSDK Public Key. + + :param key: SPSDK Public Key data or file path + """ + self.key = key + + def export(self, encoding: SPSDKEncoding = SPSDKEncoding.NXP) -> bytes: + """Export the public key to the bytes in requested format. 
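ECC signatures appear here in two layouts: the fixed-size r || s form consumed by the ROM (the NXP encoding) and ASN.1 DER; sign returns the former unless der_format=True. A self-contained check:

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.keys import PrivateKeyEcc

key = PrivateKeyEcc.generate_key()  # secp256r1 by default
raw = key.sign(b"data")  # r || s, 64 bytes for P-256
der = key.sign(b"data", der_format=True)  # ASN.1 DER, variable length
assert len(raw) == key.signature_size == 64
assert key.get_public_key().verify_signature(raw, b"data")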
+ + :param encoding: encoding type, default is NXP + :returns: Public key in bytes + """ + if encoding == SPSDKEncoding.NXP: + x_bytes = self.x.to_bytes(self.coordinate_size, Endianness.BIG.value) + y_bytes = self.y.to_bytes(self.coordinate_size, Endianness.BIG.value) + return x_bytes + y_bytes + + return self.key.public_bytes( + SPSDKEncoding.get_cryptography_encodings(encoding), + PublicFormat.SubjectPublicKeyInfo, + ) + + def verify_signature( + self, + signature: bytes, + data: bytes, + algorithm: Optional[EnumHashAlgorithm] = None, + prehashed: bool = False, + ) -> bool: + """Verify input data. + + :param signature: The signature of input data + :param data: Input data + :param algorithm: Used algorithm + :param prehashed: Use pre hashed value as input + :return: True if signature is valid, False otherwise + """ + coordinate_size = math.ceil(self.key.key_size / 8) + hash_name = ( + algorithm + or { + 256: EnumHashAlgorithm.SHA256, + 384: EnumHashAlgorithm.SHA384, + 521: EnumHashAlgorithm.SHA512, + }[self.key.key_size] + ) + + if prehashed: + signature_algorithm = ec.ECDSA( + utils.Prehashed(get_hash_algorithm(hash_name)) + ) + else: + signature_algorithm = ec.ECDSA(get_hash_algorithm(hash_name)) + + if len(signature) == self.signature_size: + der_signature = utils.encode_dss_signature( + int.from_bytes( + signature[:coordinate_size], byteorder=Endianness.BIG.value + ), + int.from_bytes( + signature[coordinate_size:], byteorder=Endianness.BIG.value + ), + ) + else: + der_signature = signature + try: + # pylint: disable=no-value-for-parameter # pylint is mixing RSA and ECC verify methods + self.key.verify(der_signature, data, signature_algorithm) + return True + except InvalidSignature: + return False + + @property + def public_numbers(self) -> ec.EllipticCurvePublicNumbers: + """Public numbers of key. + + :return: Public numbers + """ + return self.key.public_numbers() + + @property + def x(self) -> int: + """Public number X. + + :return: X + """ + return self.public_numbers.x + + @property + def y(self) -> int: + """Public number Y. + + :return: Y + """ + return self.public_numbers.y + + @classmethod + def recreate(cls, coor_x: int, coor_y: int, curve: EccCurve) -> Self: + """Recreate ECC public key from coordinates. + + :param coor_x: X coordinate of point on curve. + :param coor_y: Y coordinate of point on curve. + :param curve: ECC curve. + :return: ECC public key. + """ + pub_numbers = ec.EllipticCurvePublicNumbers( + x=coor_x, y=coor_y, curve=PrivateKeyEcc._get_ec_curve_object(curve) + ) + key = pub_numbers.public_key() + return cls(key) + + @classmethod + def recreate_from_data(cls, data: bytes, curve: Optional[EccCurve] = None) -> Self: + """Recreate ECC public key from coordinates in data blob. + + :param data: Data blob of coordinates in bytes (X,Y in Big Endian) + :param curve: ECC curve. + :return: ECC public key. 
+ """ + + def get_curve( + data_length: int, curve: Optional[EccCurve] = None + ) -> Tuple[EccCurve, bool]: + curve_list = [curve] if curve else list(EccCurve) + for cur in curve_list: + curve_obj = KeyEccCommon._get_ec_curve_object(EccCurve(cur)) + curve_sign_size = math.ceil(curve_obj.key_size / 8) * 2 + # Check raw binary format + if curve_sign_size == data_length: + return (cur, False) + # Check DER binary format + curve_sign_size += 7 + if curve_sign_size <= data_length <= curve_sign_size + 2: + return (cur, True) + raise SPSDKUnsupportedEccCurve( + f"Cannot recreate ECC curve with {data_length} length" + ) + + data_length = len(data) + (curve, der_format) = get_curve(data_length, curve) + + if der_format: + der = _load_der_public_key(data) + assert isinstance(der, ec.EllipticCurvePublicKey) + return cls(der) + + coordinate_length = data_length // 2 + coor_x = int.from_bytes( + data[:coordinate_length], byteorder=Endianness.BIG.value + ) + coor_y = int.from_bytes( + data[coordinate_length:], byteorder=Endianness.BIG.value + ) + return cls.recreate(coor_x=coor_x, coor_y=coor_y, curve=curve) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Deserialize object from bytes array. + + :param data: Data to be parsed + :returns: Recreated key + """ + try: + key = super().parse(data=data) + if isinstance(key, PublicKeyEcc): + return key + except SPSDKError: + return cls.recreate_from_data(data=data) + + raise SPSDKInvalidKeyType("Can't parse ECC public key from given data") + + def __repr__(self) -> str: + return f"ECC {self.curve} Public Key" + + def __str__(self) -> str: + """Object description in string format.""" + return f"ECC ({self.curve}) Public key: \nx({hex(self.x)}) \ny({hex(self.y)})" + + +class ECDSASignature: + """ECDSA Signature.""" + + COORDINATE_LENGTHS = { + EccCurve.SECP256R1: 32, + EccCurve.SECP384R1: 48, + EccCurve.SECP521R1: 66, + } + + def __init__(self, r: int, s: int, ecc_curve: EccCurve) -> None: + """ECDSA Signature constructor. + + :param r: r value of signature + :param s: s value of signature + :param ecc_curve: ECC Curve enum + """ + self.r = r + self.s = s + self.ecc_curve = ecc_curve + + @classmethod + def parse(cls, signature: bytes) -> Self: + """Parse signature in DER or NXP format. + + :param signature: Signature binary + """ + encoding = cls.get_encoding(signature) + if encoding == SPSDKEncoding.DER: + r, s = utils.decode_dss_signature(signature) + ecc_curve = cls.get_ecc_curve(len(signature)) + return cls(r, s, ecc_curve) + if encoding == SPSDKEncoding.NXP: + r = int.from_bytes(signature[: len(signature) // 2], Endianness.BIG.value) + s = int.from_bytes(signature[len(signature) // 2 :], Endianness.BIG.value) + ecc_curve = cls.get_ecc_curve(len(signature)) + return cls(r, s, ecc_curve) + raise SPSDKValueError(f"Invalid signature encoding {encoding.value}") + + def export(self, encoding: SPSDKEncoding = SPSDKEncoding.NXP) -> bytes: + """Export signature in DER or NXP format. 
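ECDSASignature can convert a signature between the two layouts; a self-contained round trip, importing SPSDKEncoding from the vendored crypto.types module that these files themselves use:

from pynitrokey.trussed.bootloader.lpc55_upload.crypto.keys import ECDSASignature, PrivateKeyEcc
from pynitrokey.trussed.bootloader.lpc55_upload.crypto.types import SPSDKEncoding

key = PrivateKeyEcc.generate_key()
der = key.sign(b"data", der_format=True)
sig = ECDSASignature.parse(der)  # encoding and curve are auto-detected
nxp = sig.export(SPSDKEncoding.NXP)  # fixed 64-byte r || s for P-256
assert ECDSASignature.parse(nxp).r == sig.r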
+ + :param encoding: Signature encoding + :return: Signature as bytes + """ + if encoding == SPSDKEncoding.NXP: + r_bytes = self.r.to_bytes( + self.COORDINATE_LENGTHS[self.ecc_curve], Endianness.BIG.value + ) + s_bytes = self.s.to_bytes( + self.COORDINATE_LENGTHS[self.ecc_curve], Endianness.BIG.value + ) + return r_bytes + s_bytes + if encoding == SPSDKEncoding.DER: + return utils.encode_dss_signature(self.r, self.s) + raise SPSDKValueError(f"Invalid signature encoding {encoding.value}") + + @classmethod + def get_encoding(cls, signature: bytes) -> SPSDKEncoding: + """Get encoding of signature. + + :param signature: Signature + """ + signature_length = len(signature) + # Try detect the NXP format by data length + if signature_length // 2 in cls.COORDINATE_LENGTHS.values(): + return SPSDKEncoding.NXP + # Try detect the DER format by decode of header + try: + utils.decode_dss_signature(signature) + return SPSDKEncoding.DER + except ValueError: + pass + raise SPSDKValueError( + f"The given signature with length {signature_length} does not match any encoding" + ) + + @classmethod + def get_ecc_curve(cls, signature_length: int) -> EccCurve: + """Get the Elliptic Curve of signature. + + :param signature_length: Signature length + """ + for curve, coord_len in cls.COORDINATE_LENGTHS.items(): + if signature_length == coord_len * 2: + return curve + if signature_length in range(coord_len * 2 + 3, coord_len * 2 + 9): + return curve + raise SPSDKValueError( + f"The given signature with length {signature_length} does not match any ecc curve" + ) + + +# # =================================================================================================== +# # =================================================================================================== +# # +# # General section +# # +# # =================================================================================================== +# # =================================================================================================== + +GeneratorParams = Dict[str, Union[int, str, bool]] +KeyGeneratorInfo = Dict[str, Tuple[Callable[..., PrivateKey], GeneratorParams]] + + +def get_supported_keys_generators() -> KeyGeneratorInfo: + """Generate list with list of supported key types. + + :return: `KeyGeneratorInfo` dictionary of supported key types. + """ + ret: KeyGeneratorInfo = { + # RSA keys + "rsa2048": (PrivateKeyRsa.generate_key, {"key_size": 2048}), + "rsa3072": (PrivateKeyRsa.generate_key, {"key_size": 3072}), + "rsa4096": (PrivateKeyRsa.generate_key, {"key_size": 4096}), + # ECC keys + "secp256r1": (PrivateKeyEcc.generate_key, {"curve_name": "secp256r1"}), + "secp384r1": (PrivateKeyEcc.generate_key, {"curve_name": "secp384r1"}), + "secp521r1": (PrivateKeyEcc.generate_key, {"curve_name": "secp521r1"}), + } + + return ret + + +def get_ecc_curve(key_length: int) -> EccCurve: + """Get curve name for Crypto library. + + :param key_length: Length of ecc key in bytes + """ + if key_length <= 32 or key_length == 64: + return EccCurve.SECP256R1 + if key_length <= 48 or key_length == 96: + return EccCurve.SECP384R1 + if key_length <= 66: + return EccCurve.SECP521R1 + raise SPSDKError(f"Not sure what curve corresponds to {key_length} data") + + +def prompt_for_passphrase() -> str: + """Prompt interactively for private key passphrase.""" + password = getpass.getpass( + prompt="Private key is encrypted. 
Enter password: ", stream=None + ) + return password diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/rng.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/rng.py new file mode 100644 index 00000000..f03a1ed4 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/rng.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Implementation for getting random numbers.""" + +# Used security modules + + +from secrets import randbelow, token_bytes, token_hex + + +def random_bytes(length: int) -> bytes: + """Return a random byte string with specified length. + + :param length: The length in bytes + :return: Random bytes + """ + return token_bytes(length) + + +def random_hex(length: int) -> str: + """Return a random hex string with specified length. + + :param length: The length in bytes + :return: Random hex + """ + return token_hex(length) + + +def rand_below(upper_bound: int) -> int: + """Return a random number in range [0, upper_bound]. + + :param upper_bound: Upper bound + :return: Random number + """ + return randbelow(upper_bound) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/signature_provider.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/signature_provider.py new file mode 100644 index 00000000..77fd17b8 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/signature_provider.py @@ -0,0 +1,437 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2020-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""SignatureProvider is an Interface for all potential signature providers. + +Each concrete signature provider needs to implement: +- sign(data: bytes) -> bytes +- signature_length -> int +- into() -> str +""" + +import abc +import json +import logging +from types import ModuleType +from typing import Any, Dict, List, Optional, Tuple, Type, Union + +import requests +from cryptography.hazmat.primitives.hashes import HashAlgorithm + +from ..crypto.exceptions import SPSDKKeysNotMatchingError +from ..crypto.hash import EnumHashAlgorithm, get_hash_algorithm +from ..crypto.keys import ( + ECDSASignature, + PrivateKey, + PrivateKeyEcc, + PrivateKeyRsa, + PublicKeyEcc, + PublicKeyRsa, + SPSDKKeyPassphraseMissing, + prompt_for_passphrase, +) +from ..crypto.types import SPSDKEncoding +from ..exceptions import ( + SPSDKError, + SPSDKKeyError, + SPSDKUnsupportedOperation, + SPSDKValueError, +) +from ..utils.misc import find_file +from ..utils.plugins import PluginsManager, PluginType + +logger = logging.getLogger(__name__) + + +class SignatureProvider(abc.ABC): + """Abstract class (Interface) for all signature providers.""" + + # Subclasses override the following signature provider type + sp_type = "INVALID" + reserved_keys = ["type", "search_paths"] + + @abc.abstractmethod + def sign(self, data: bytes) -> bytes: + """Return signature for data.""" + + @property + @abc.abstractmethod + def signature_length(self) -> int: + """Return length of the signature.""" + + def verify_public_key(self, public_key: bytes) -> bool: + """Verify if given public key matches private key.""" + raise SPSDKUnsupportedOperation("Verify method is not supported.") + + def get_signature(self, data: bytes) -> bytes: + """Get signature. In case of ECC signature, the NXP format(r+s) is used. + + :param data: Data to be signed. 
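# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# The minimum a concrete provider must implement is sign() and signature_length;
# everything else (get_signature, create, verify_public_key) is inherited.  The
# class below is a made-up example; its sp_type string ("null") is the token that
# SignatureProvider.create() would match against.
class NullSP(SignatureProvider):
    sp_type = "null"                      # hypothetical provider type

    def sign(self, data: bytes) -> bytes:
        return bytes(64)                  # fixed-size placeholder "signature"

    @property
    def signature_length(self) -> int:
        return 64
# ----------------------------------------------------------------------------------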
+ :return: Signature of the data + + """ + signature = self.sign(data) + try: + ecdsa_sig = ECDSASignature.parse(signature) + signature = ecdsa_sig.export(SPSDKEncoding.NXP) + except SPSDKValueError: + pass # Not an ECC signature + if len(signature) != self.signature_length: + logger.warning( + f"Signature has unexpected length: {len(signature)}. Expected length: {self.signature_length}" + ) + return signature + + def info(self) -> str: + """Provide information about the Signature provider.""" + return self.__class__.__name__ + + @staticmethod + def convert_params(params: str) -> Dict[str, str]: + """Coverts creation params from string into dictionary. + + e.g.: "type=file;file_path=some_path" -> {'type': 'file', 'file_path': 'some_path'} + :param params: Params in the mentioned format. + :raises: SPSDKKeyError: Duplicate key found. + :raises: SPSDKValueError: Parameter must meet the following pattern: type=file;file_path=some_path. + :return: Converted dictionary of parameters. + """ + result: Dict[str, str] = {} + try: + for p in params.split(";"): + key, value = p.split("=") + + # Check for duplicate keys + if key in result: + raise SPSDKKeyError(f"Duplicate key found: {key}") + + result[key] = value + + except ValueError as e: + raise SPSDKValueError( + "Parameter must meet the following pattern: type=file;file_path=some_path" + ) from e + + return result + + @classmethod + def get_types(cls) -> List[str]: + """Returns a list of all available signature provider types.""" + return [sub_class.sp_type for sub_class in cls.__subclasses__()] + + @classmethod + def filter_params(cls, klass: Any, params: Dict[str, str]) -> Dict[str, str]: + """Remove unused parameters from the given dictionary based on the class constructor. + + :param klass: Signature provider class. + :param params: Dictionary of parameters. + :return: Filtered dictionary of parameters. + """ + unused_params = set(params) - set(klass.__init__.__code__.co_varnames) + for key in cls.reserved_keys: + if key in unused_params: + del params[key] + return params + + @classmethod + def create(cls, params: Union[str, dict]) -> Optional["SignatureProvider"]: + """Creates an concrete instance of signature provider.""" + load_plugins() + if isinstance(params, str): + params = cls.convert_params(params) + sp_classes = cls.get_all_signature_providers() + for ( + klass + ) in sp_classes: # pragma: no branch # there always be at least one subclass + if klass.sp_type == params["type"]: + klass.filter_params(klass, params) + return klass(**params) + + logger.info(f"Signature provider of type {params['type']} was not found.") + return None + + @staticmethod + def get_all_signature_providers() -> List[Type["SignatureProvider"]]: + """Get list of all available signature providers.""" + + def get_subclasses( + base_class: Type, + ) -> List[Type["SignatureProvider"]]: + """Recursively find all subclasses.""" + subclasses = [] + for subclass in base_class.__subclasses__(): + subclasses.append(subclass) + subclasses.extend(get_subclasses(subclass)) + return subclasses + + return get_subclasses(SignatureProvider) + + +class PlainFileSP(SignatureProvider): + """PlainFileSP is a SignatureProvider implementation that uses plain local files.""" + + sp_type = "file" + + def __init__( + self, + file_path: str, + password: Optional[str] = None, + hash_alg: Optional[EnumHashAlgorithm] = None, + search_paths: Optional[List[str]] = None, + ) -> None: + """Initialize the plain file signature provider. 
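# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# convert_params() turns the "key=value;key=value" configuration string into the
# dictionary that create() dispatches on via its "type" entry (path is made up).
params = SignatureProvider.convert_params("type=file;file_path=keys/rot_k0.pem")
assert params == {"type": "file", "file_path": "keys/rot_k0.pem"}
# SignatureProvider.create(params) would then instantiate PlainFileSP (defined below).
# ----------------------------------------------------------------------------------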
+ + :param file_path: Path to private file + :param password: Password in case of encrypted private file, defaults to None + :param hash_alg: Hash for the signature, defaults to None + :param search_paths: List of paths where to search for the file, defaults to None + :raises SPSDKError: Invalid Private Key + """ + self.file_path = find_file(file_path=file_path, search_paths=search_paths) + self.private_key = PrivateKey.load(self.file_path, password=password) + self.hash_alg = self._get_hash_algorithm(hash_alg) + + def _get_hash_algorithm( + self, hash_alg: Optional[EnumHashAlgorithm] = None + ) -> HashAlgorithm: + if hash_alg: + hash_alg_name = hash_alg + else: + if isinstance(self.private_key, PrivateKeyRsa): + hash_alg_name = EnumHashAlgorithm.SHA256 + + elif isinstance(self.private_key, PrivateKeyEcc): + # key_size <= 256 => SHA256 + # 256 < key_size <= 384 => SHA384 + # 384 < key_size => SHA512 + if self.private_key.key_size <= 256: + hash_size = 256 + elif 256 < self.private_key.key_size <= 384: + hash_size = 384 + else: + hash_size = 512 + hash_alg_name = EnumHashAlgorithm.from_label(f"sha{hash_size}") + + else: + raise SPSDKError( + f"Unsupported private key by signature provider: {str(self.private_key)}" + ) + return get_hash_algorithm(hash_alg_name) + + @property + def signature_length(self) -> int: + """Return length of the signature.""" + return self.private_key.signature_size + + def verify_public_key(self, public_key: bytes) -> bool: + """Verify if given public key matches private key.""" + try: + return self.private_key.verify_public_key(PublicKeyEcc.parse(public_key)) + except SPSDKError: + pass + try: + return self.private_key.verify_public_key(PublicKeyRsa.parse(public_key)) + except SPSDKError: + pass + raise SPSDKError("Unsupported public key") + + def info(self) -> str: + """Return basic into about the signature provider.""" + msg = super().info() + msg += f"\nKey path: {self.file_path}\n" + return msg + + def sign(self, data: bytes) -> bytes: + """Return the signature for data.""" + return self.private_key.sign(data) + + +class InteractivePlainFileSP(PlainFileSP): + """SignatureProvider implementation that uses plain local file in an "interactive" mode. + + If the private key is encrypted, the user will be prompted for password + """ + + sp_type = "interactive_file" + + def __init__( # pylint: disable=super-init-not-called + self, + file_path: str, + hash_alg: Optional[EnumHashAlgorithm] = None, + search_paths: Optional[List[str]] = None, + ) -> None: + """Initialize the interactive plain file signature provider. + + :param file_path: Path to private file + :param hash_alg: Hash for the signature, defaults to sha256 + :param search_paths: List of paths where to search for the file, defaults to None + :raises SPSDKError: Invalid Private Key + """ + self.file_path = find_file(file_path=file_path, search_paths=search_paths) + try: + self.private_key = PrivateKey.load(self.file_path) + except SPSDKKeyPassphraseMissing: + password = prompt_for_passphrase() + self.private_key = PrivateKey.load(self.file_path, password=password) + self.hash_alg = self._get_hash_algorithm(hash_alg) + + +class HttpProxySP(SignatureProvider): + """Signature Provider implementation that delegates all operations to a proxy server.""" + + sp_type = "proxy" + reserved_keys = ["type", "search_paths", "data"] + + def __init__( + self, + host: str = "localhost", + port: str = "8000", + url_prefix: str = "api", + **kwargs: Dict[str, str], + ) -> None: + """Initialize Http Proxy Signature Provider. 
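# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# The default-hash rule applied by PlainFileSP._get_hash_algorithm() for ECC keys,
# restated as a stand-alone helper with the same thresholds, purely for clarity.
def _default_ecc_hash_bits(key_size: int) -> int:
    if key_size <= 256:
        return 256
    if key_size <= 384:
        return 384
    return 512

assert _default_ecc_hash_bits(521) == 512     # secp521r1 keys sign with SHA-512
# ----------------------------------------------------------------------------------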
+ + :param host: Hostname (IP address) of the proxy server, defaults to "localhost" + :param port: Port of the proxy server, defaults to "8000" + :param url_prefix: REST API prefix, defaults to "api" + """ + self.base_url = f"http://{host}:{port}/" + self.base_url += f"{url_prefix}/" if url_prefix else "" + self.kwargs = kwargs + + def _handle_request(self, url: str, data: Optional[Dict] = None) -> Dict: + """Handle REST API request. + + :param url: REST API endpoint URL + :param data: JSON payload data, defaults to None + :raises SPSDKError: HTTP Error during API request + :raises SPSDKError: Invalid response data (not a valid dictionary) + :return: REST API data response as dictionary + """ + json_payload = data or {} + json_payload.update(self.kwargs) + full_url = self.base_url + url + logger.info(f"Requesting: {full_url}") + response = requests.get(url=full_url, json=json_payload, timeout=60) + logger.info(f"Response: {response}") + if not response.ok: + try: + extra_message = response.json() + except json.JSONDecodeError: + extra_message = "N/A" + raise SPSDKError( + f"Error {response.status_code} ({response.reason}) occurred when calling {full_url}\n" + f"Extra response data: {extra_message}" + ) + try: + return response.json() + except json.JSONDecodeError as e: + raise SPSDKError("Response is not a valid JSON object") from e + + def _check_response( + self, response: Dict, names_types: List[Tuple[str, Type]] + ) -> None: + """Check if the response contains required data. + + :param response: Response to check + :param names_types: Name and type of required response members + :raises SPSDKError: Response doesn't contain required member + :raises SPSDKError: Responses' member has incorrect type + """ + for name, typ in names_types: + if name not in response: + raise SPSDKError(f"Response object doesn't contain member '{name}'") + if not isinstance(response[name], typ): + raise SPSDKError( + f"Response member '{name}' is not a instance of '{typ}' but '{type(response[name])}'" + ) + + def sign(self, data: bytes) -> bytes: + """Return signature for data.""" + response = self._handle_request("sign", {"data": data.hex()}) + self._check_response(response=response, names_types=[("data", str)]) + return bytes.fromhex(response["data"]) + + @property + def signature_length(self) -> int: + """Return length of the signature.""" + response = self._handle_request("signature_length") + self._check_response(response=response, names_types=[("data", int)]) + return int(response["data"]) + + def verify_public_key(self, public_key: bytes) -> bool: + """Verify if given public key matches private key.""" + response = self._handle_request("verify_public_key", {"data": public_key.hex()}) + self._check_response(response=response, names_types=[("data", bool)]) + return response["data"] + + +def get_signature_provider( + sp_cfg: Optional[str] = None, local_file_key: Optional[str] = None, **kwargs: Any +) -> SignatureProvider: + """Get the signature provider from configuration. + + :param sp_cfg: Configuration of signature provider. + :param local_file_key: Optional backward compatibility + option to specify just path to local private key. + :param kwargs: Additional parameters, that could be accepted by Signature providers. + :return: Signature Provider instance. + :raises SPSDKError: Invalid input configuration. 
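# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# The REST contract HttpProxySP relies on: every endpoint ("sign",
# "signature_length", "verify_public_key") is reached with a GET request carrying
# a JSON body and must answer with a JSON object that has a "data" member.  The
# host/port below are just the documented defaults; no server is contacted here.
sp = HttpProxySP(host="localhost", port="8000", url_prefix="api")
assert sp.base_url == "http://localhost:8000/api/"
# sp.sign(b"message") would GET http://localhost:8000/api/sign with a hex "data"
# payload and return bytes.fromhex(response["data"]).
# ----------------------------------------------------------------------------------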
+ """ + if sp_cfg: + params: Dict[str, Union[str, List[str]]] = {} + params.update(SignatureProvider.convert_params(sp_cfg)) + for k, v in kwargs.items(): + if not k in params: + params[k] = v + signature_provider = SignatureProvider.create(params=params) + elif local_file_key: + signature_provider = InteractivePlainFileSP( + file_path=local_file_key, + search_paths=kwargs.get("search_paths"), + ) + else: + raise SPSDKValueError("No signature provider configuration is provided") + + if not signature_provider: + raise SPSDKError( + f"Cannot create signature provider from: {sp_cfg or local_file_key}" + ) + + return signature_provider + + +def load_plugins() -> Dict[str, ModuleType]: + """Load all installed signature provider plugins.""" + plugins_manager = PluginsManager() + plugins_manager.load_from_entrypoints(PluginType.SIGNATURE_PROVIDER.label) + return plugins_manager.plugins + + +def try_to_verify_public_key( + signature_provider: SignatureProvider, public_key_data: bytes +) -> None: + """Verify public key by signature provider if verify method is implemented. + + :param signature_provider: Signature provider used for verification. + :param public_key_data: Public key data to be verified. + :raises SPSDKUnsupportedOperation: The verify_public_key method si nto implemented + :raises SPSDKError: The verification of key-pair integrity failed + """ + try: + result = signature_provider.verify_public_key(public_key_data) + if not result: + raise SPSDKKeysNotMatchingError( + "Signature verification failed, public key does not match to private key" + ) + logger.debug( + "The verification of private key pair integrity has been successful." + ) + except SPSDKUnsupportedOperation: + logger.warning( + "Signature provider could not verify the integrity of private key pair." + ) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/symmetric.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/symmetric.py new file mode 100644 index 00000000..2f3324f1 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/symmetric.py @@ -0,0 +1,300 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""OpenSSL implementation for symmetric key encryption.""" + + +# Used security modules +from typing import Optional + +from cryptography.hazmat.primitives import keywrap +from cryptography.hazmat.primitives.ciphers import Cipher, aead, algorithms, modes + +from ..exceptions import SPSDKError +from ..utils.misc import Endianness, align_block + + +class Counter: + """AES counter with specified counter byte ordering and customizable increment.""" + + @property + def value(self) -> bytes: + """Initial vector for AES encryption.""" + return self._nonce + self._ctr.to_bytes(4, self._ctr_byteorder_encoding.value) + + def __init__( + self, + nonce: bytes, + ctr_value: Optional[int] = None, + ctr_byteorder_encoding: Endianness = Endianness.LITTLE, + ): + """Constructor. 
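# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# The two entry points accepted by get_signature_provider(), mirroring the two
# branches above; the key paths are hypothetical and must exist for the calls to
# succeed (the second form falls back to InteractivePlainFileSP).
sp_from_cfg = get_signature_provider(sp_cfg="type=file;file_path=keys/rot_k0.pem")
sp_from_key = get_signature_provider(local_file_key="keys/rot_k0.pem")
# ----------------------------------------------------------------------------------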
+ + :param nonce: last four bytes are used as initial value for counter + :param ctr_value: counter initial value; it is added to counter value retrieved from nonce + :param ctr_byteorder_encoding: way how the counter is encoded into output value + :raises SPSDKError: When invalid byteorder is provided + """ + assert isinstance(nonce, bytes) and len(nonce) == 16 + self._nonce = nonce[:-4] + self._ctr_byteorder_encoding = ctr_byteorder_encoding + self._ctr = int.from_bytes(nonce[-4:], ctr_byteorder_encoding.value) + if ctr_value is not None: + self._ctr += ctr_value + + def increment(self, value: int = 1) -> None: + """Increment counter by specified value. + + :param value: to add to counter + """ + self._ctr += value + + +def aes_key_wrap(kek: bytes, key_to_wrap: bytes) -> bytes: + """Wraps a key using a key-encrypting key (KEK). + + :param kek: The key-encrypting key + :param key_to_wrap: Plain data + :return: Wrapped key + """ + return keywrap.aes_key_wrap(kek, key_to_wrap) + + +def aes_key_unwrap(kek: bytes, wrapped_key: bytes) -> bytes: + """Unwraps a key using a key-encrypting key (KEK). + + :param kek: The key-encrypting key + :param wrapped_key: Encrypted data + :return: Un-wrapped key + """ + return keywrap.aes_key_unwrap(kek, wrapped_key) + + +def aes_ecb_encrypt(key: bytes, plain_data: bytes) -> bytes: + """Encrypt plain data with AES in ECB mode. + + :param key: The key for data encryption + :param plain_data: Input data + :return: Encrypted data + """ + cipher = Cipher(algorithms.AES(key), modes.ECB()) + enc = cipher.encryptor() + return enc.update(plain_data) + enc.finalize() + + +def aes_ecb_decrypt(key: bytes, encrypted_data: bytes) -> bytes: + """Decrypt encrypted data with AES in ECB mode. + + :param key: The key for data decryption + :param encrypted_data: Input data + :return: Decrypted data + """ + cipher = Cipher(algorithms.AES(key), modes.ECB()) + enc = cipher.decryptor() + return enc.update(encrypted_data) + enc.finalize() + + +def aes_cbc_encrypt( + key: bytes, plain_data: bytes, iv_data: Optional[bytes] = None +) -> bytes: + """Encrypt plain data with AES in CBC mode. + + :param key: The key for data encryption + :param plain_data: Input data + :param iv_data: Initialization vector data + :raises SPSDKError: Invalid Key or IV + :return: Encrypted image + """ + if len(key) * 8 not in algorithms.AES.key_sizes: + raise SPSDKError( + "The key must be a valid AES key length: " + f"{', '.join([str(k) for k in algorithms.AES.key_sizes])}" + ) + init_vector = iv_data or bytes(algorithms.AES.block_size // 8) + if len(init_vector) * 8 != algorithms.AES.block_size: + raise SPSDKError( + f"The initial vector length must be {algorithms.AES.block_size // 8}" + ) + cipher = Cipher(algorithms.AES(key), modes.CBC(init_vector)) + enc = cipher.encryptor() + return ( + enc.update(align_block(plain_data, alignment=algorithms.AES.block_size // 8)) + + enc.finalize() + ) + + +def aes_cbc_decrypt( + key: bytes, encrypted_data: bytes, iv_data: Optional[bytes] = None +) -> bytes: + """Decrypt encrypted data with AES in CBC mode. 
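# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# Counter keeps the first 12 nonce bytes fixed and interprets the last 4 as the
# counter (little-endian by default), which is what value reassembles after each
# increment.
ctr = Counter(bytes(12) + (1).to_bytes(4, "little"))      # counter starts at 1
ctr.increment(2)
assert ctr.value == bytes(12) + (3).to_bytes(4, "little")
# ----------------------------------------------------------------------------------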
+ + :param key: The key for data decryption + :param encrypted_data: Input data + :param iv_data: Initialization vector data + :raises SPSDKError: Invalid Key or IV + :return: Decrypted image + """ + if len(key) * 8 not in algorithms.AES.key_sizes: + raise SPSDKError( + "The key must be a valid AES key length: " + f"{', '.join([str(k) for k in algorithms.AES.key_sizes])}" + ) + init_vector = iv_data or bytes(algorithms.AES.block_size) + if len(init_vector) * 8 != algorithms.AES.block_size: + raise SPSDKError( + f"The initial vector length must be {algorithms.AES.block_size}" + ) + cipher = Cipher(algorithms.AES(key), modes.CBC(init_vector)) + dec = cipher.decryptor() + return dec.update(encrypted_data) + dec.finalize() + + +def aes_ctr_encrypt(key: bytes, plain_data: bytes, nonce: bytes) -> bytes: + """Encrypt plain data with AES in CTR mode. + + :param key: The key for data encryption + :param plain_data: Input data + :param nonce: Nonce data with counter value + :return: Encrypted data + """ + cipher = Cipher(algorithms.AES(key), modes.CTR(nonce)) + enc = cipher.encryptor() + return enc.update(plain_data) + enc.finalize() + + +def aes_ctr_decrypt(key: bytes, encrypted_data: bytes, nonce: bytes) -> bytes: + """Decrypt encrypted data with AES in CTR mode. + + :param key: The key for data decryption + :param encrypted_data: Input data + :param nonce: Nonce data with counter value + :return: Decrypted data + """ + cipher = Cipher(algorithms.AES(key), modes.CTR(nonce)) + enc = cipher.decryptor() + return enc.update(encrypted_data) + enc.finalize() + + +def aes_xts_encrypt(key: bytes, plain_data: bytes, tweak: bytes) -> bytes: + """Encrypt plain data with AES in XTS mode. + + :param key: The key for data encryption + :param plain_data: Input data + :param tweak: The tweak is a 16 byte value + :return: Encrypted data + """ + cipher = Cipher(algorithms.AES(key), modes.XTS(tweak)) + enc = cipher.encryptor() + return enc.update(plain_data) + enc.finalize() + + +def aes_xts_decrypt(key: bytes, encrypted_data: bytes, tweak: bytes) -> bytes: + """Decrypt encrypted data with AES in XTS mode. + + :param key: The key for data decryption + :param encrypted_data: Input data + :param tweak: The tweak is a 16 byte value + :return: Decrypted data + """ + cipher = Cipher(algorithms.AES(key), modes.XTS(tweak)) + enc = cipher.decryptor() + return enc.update(encrypted_data) + enc.finalize() + + +def aes_ccm_encrypt( + key: bytes, + plain_data: bytes, + nonce: bytes, + associated_data: bytes = b"", + tag_len: int = 16, +) -> bytes: + """Encrypt plain data with AES in CCM mode (Counter with CBC). + + :param key: The key for data encryption + :param plain_data: Input data + :param nonce: Nonce data with counter value + :param associated_data: Associated data - Unencrypted but authenticated + :param tag_len: Length of encryption tag + :return: Encrypted data + """ + aesccm = aead.AESCCM(key, tag_length=tag_len) + return aesccm.encrypt(nonce, plain_data, associated_data) + + +def aes_ccm_decrypt( + key: bytes, + encrypted_data: bytes, + nonce: bytes, + associated_data: bytes, + tag_len: int = 16, +) -> bytes: + """Decrypt encrypted data with AES in CCM mode (Counter with CBC). 
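# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# CBC round trip with an explicit IV; a block-aligned plaintext is used so that
# align_block() in the encrypt helper does not have to add any padding.
key = bytes(range(16))                 # any valid AES key length works (128/192/256 bit)
iv = bytes(16)
msg = b"sixteen byte msg"
assert aes_cbc_decrypt(key, aes_cbc_encrypt(key, msg, iv), iv) == msg
# ----------------------------------------------------------------------------------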
+ + :param key: The key for data decryption + :param encrypted_data: Input data + :param nonce: Nonce data with counter value + :param associated_data: Associated data - Unencrypted but authenticated + :param tag_len: Length of encryption tag + :return: Decrypted data + """ + aesccm = aead.AESCCM(key, tag_length=tag_len) + return aesccm.decrypt(nonce, encrypted_data, associated_data) + + +def sm4_cbc_encrypt( + key: bytes, plain_data: bytes, iv_data: Optional[bytes] = None +) -> bytes: + """Encrypt plain data with SM4 in CBC mode. + + :param key: The key for data encryption + :param plain_data: Input data + :param iv_data: Initialization vector data + :raises SPSDKError: Invalid Key or IV + :return: Encrypted image + """ + if len(key) * 8 not in algorithms.SM4.key_sizes: + raise SPSDKError( + "The key must be a valid SM4 key length: " + f"{', '.join([str(k) for k in algorithms.SM4.key_sizes])}" + ) + init_vector = iv_data or bytes(algorithms.SM4.block_size // 8) + if len(init_vector) * 8 != algorithms.SM4.block_size: + raise SPSDKError( + f"The initial vector length must be {algorithms.SM4.block_size // 8}" + ) + cipher = Cipher(algorithms.SM4(key), modes.CBC(init_vector)) + enc = cipher.encryptor() + return ( + enc.update(align_block(plain_data, alignment=algorithms.SM4.block_size // 8)) + + enc.finalize() + ) + + +def sm4_cbc_decrypt( + key: bytes, encrypted_data: bytes, iv_data: Optional[bytes] = None +) -> bytes: + """Decrypt encrypted data with SM4 in CBC mode. + + :param key: The key for data decryption + :param encrypted_data: Input data + :param iv_data: Initialization vector data + :raises SPSDKError: Invalid Key or IV + :return: Decrypted image + """ + if len(key) * 8 not in algorithms.SM4.key_sizes: + raise SPSDKError( + "The key must be a valid SM4 key length: " + f"{', '.join([str(k) for k in algorithms.AES.key_sizes])}" + ) + init_vector = iv_data or bytes(algorithms.SM4.block_size) + if len(init_vector) * 8 != algorithms.SM4.block_size: + raise SPSDKError( + f"The initial vector length must be {algorithms.SM4.block_size}" + ) + cipher = Cipher(algorithms.SM4(key), modes.CBC(init_vector)) + dec = cipher.decryptor() + return dec.update(encrypted_data) + dec.finalize() diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/types.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/types.py new file mode 100644 index 00000000..ec5dd5b7 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/types.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Based crypto classes.""" +from typing import Dict + +from cryptography import utils +from cryptography.hazmat.primitives.serialization import Encoding +from cryptography.x509.base import Version +from cryptography.x509.extensions import Extensions, KeyUsage +from cryptography.x509.name import Name +from cryptography.x509.oid import ExtensionOID, NameOID, ObjectIdentifier + +from ..exceptions import SPSDKError + + +class SPSDKEncoding(utils.Enum): + """Extension of cryptography Encoders class.""" + + NXP = "NXP" + PEM = "PEM" + DER = "DER" + + @staticmethod + def get_cryptography_encodings(encoding: "SPSDKEncoding") -> Encoding: + """Get Encoding in cryptography class.""" + cryptography_encoding = { + SPSDKEncoding.PEM: Encoding.PEM, + SPSDKEncoding.DER: Encoding.DER, + }.get(encoding) + if cryptography_encoding is None: + raise SPSDKError(f"{encoding} format is not supported by cryptography.") + return 
cryptography_encoding + + @staticmethod + def get_file_encodings(data: bytes) -> "SPSDKEncoding": + """Get the encoding type out of given item from the data. + + :param data: Already loaded data file to determine the encoding style + :return: encoding type (Encoding.PEM, Encoding.DER) + """ + encoding = SPSDKEncoding.PEM + try: + decoded = data.decode("utf-8") + except UnicodeDecodeError: + encoding = SPSDKEncoding.DER + else: + if decoded.find("----") == -1: + encoding = SPSDKEncoding.DER + return encoding + + @staticmethod + def all() -> Dict[str, "SPSDKEncoding"]: + """Get all supported encodings.""" + return { + "NXP": SPSDKEncoding.NXP, + "PEM": SPSDKEncoding.PEM, + "DER": SPSDKEncoding.DER, + } + + +SPSDKExtensions = Extensions +SPSDKExtensionOID = ExtensionOID +SPSDKNameOID = NameOID +SPSDKKeyUsage = KeyUsage +SPSDKName = Name +SPSDKVersion = Version +SPSDKObjectIdentifier = ObjectIdentifier diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/crypto/utils.py b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/utils.py new file mode 100644 index 00000000..275af21b --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/crypto/utils.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""OpenSSL implementation for security backend.""" + +from typing import Iterable, List, Optional + +from ..crypto.certificate import Certificate +from ..crypto.keys import PrivateKey, PublicKey +from ..crypto.signature_provider import SignatureProvider +from ..exceptions import SPSDKError, SPSDKValueError +from ..utils.misc import load_binary + + +def get_matching_key_id( + public_keys: List[PublicKey], signature_provider: SignatureProvider +) -> int: + """Get index of public key that match to given private key. + + :param public_keys: List of public key used to find the match for the private key. + :param signature_provider: Signature provider used to try to match public key index. + :raises SPSDKValueError: No match found. + :return: Index of public key. + """ + for i, public_key in enumerate(public_keys): + if signature_provider.verify_public_key(public_key.export()): + return i + + raise SPSDKValueError("There is no match of private key in given list.") + + +def extract_public_key_from_data( + object_data: bytes, password: Optional[str] = None +) -> PublicKey: + """Extract any kind of public key from a data that contains Certificate, Private Key or Public Key. + + :raises SPSDKError: Raised when file can not be loaded + :return: private key of any type + """ + try: + return Certificate.parse(object_data).get_public_key() + except SPSDKError: + pass + + try: + return PrivateKey.parse( + object_data, password=password if password else None + ).get_public_key() + except SPSDKError: + pass + + try: + return PublicKey.parse(object_data) + except SPSDKError as exc: + raise SPSDKError("Unable to load secret data.") from exc + + +def extract_public_key( + file_path: str, + password: Optional[str] = None, + search_paths: Optional[List[str]] = None, +) -> PublicKey: + """Extract any kind of public key from a file that contains Certificate, Private Key or Public Key. + + :param file_path: File path to public key file. + :param password: Optional password for encrypted Private file source. 
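# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# get_file_encodings() is a cheap heuristic: data that decodes as UTF-8 and
# contains a "----" marker is PEM, everything else is treated as DER.
pem_blob = b"-----BEGIN PUBLIC KEY-----\n...\n-----END PUBLIC KEY-----\n"
assert SPSDKEncoding.get_file_encodings(pem_blob) == SPSDKEncoding.PEM
assert SPSDKEncoding.get_file_encodings(bytes([0x30, 0x82])) == SPSDKEncoding.DER
# ----------------------------------------------------------------------------------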
+ :param search_paths: List of paths where to search for the file, defaults to None + :raises SPSDKError: Raised when file can not be loaded + :return: Public key of any type + """ + try: + object_data = load_binary(file_path, search_paths=search_paths) + return extract_public_key_from_data(object_data, password) + except SPSDKError as exc: + raise SPSDKError(f"Unable to load secret file '{file_path}'.") from exc + + +def extract_public_keys( + secret_files: Iterable[str], + password: Optional[str] = None, + search_paths: Optional[List[str]] = None, +) -> List[PublicKey]: + """Extract any kind of public key from files that contain Certificate, Private Key or Public Key. + + :param secret_files: List of file paths to public key files. + :param password: Optional password for encrypted Private file source. + :param search_paths: List of paths where to search for the file, defaults to None + :return: List of public keys of any type + """ + return [ + extract_public_key( + file_path=source, password=password, search_paths=search_paths + ) + for source in secret_files + ] diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/exceptions.py b/pynitrokey/trussed/bootloader/lpc55_upload/exceptions.py new file mode 100644 index 00000000..99a0e961 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/exceptions.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Base for SPSDK exceptions.""" +from typing import Optional + +####################################################################### +# # Secure Provisioning SDK Exceptions +####################################################################### + + +class SPSDKError(Exception): + """Secure Provisioning SDK Base Exception.""" + + fmt = "SPSDK: {description}" + + def __init__(self, desc: Optional[str] = None) -> None: + """Initialize the base SPSDK Exception.""" + super().__init__() + self.description = desc + + def __str__(self) -> str: + return self.fmt.format(description=self.description or "Unknown Error") + + +class SPSDKKeyError(SPSDKError, KeyError): + """SPSDK standard key error.""" + + +class SPSDKValueError(SPSDKError, ValueError): + """SPSDK standard value error.""" + + +class SPSDKTypeError(SPSDKError, TypeError): + """SPSDK standard type error.""" + + +class SPSDKIOError(SPSDKError, IOError): + """SPSDK standard IO error.""" + + +class SPSDKNotImplementedError(SPSDKError, NotImplementedError): + """SPSDK standard not implemented error.""" + + +class SPSDKLengthError(SPSDKError, ValueError): + """SPSDK parsing error of any AHAB containers. + + Input/output data must be of at least container declared length bytes long. 
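# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# Because every specialised exception also inherits the matching built-in, callers
# can catch it either as an SPSDK error or as the standard Python exception, and
# str() always goes through SPSDKError.fmt.
err = SPSDKValueError("bad length")
assert isinstance(err, SPSDKError) and isinstance(err, ValueError)
assert str(err) == "SPSDK: bad length"
# ----------------------------------------------------------------------------------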
+ """ + + +class SPSDKOverlapError(SPSDKError, ValueError): + """Data overlap error.""" + + +class SPSDKAlignmentError(SPSDKError, ValueError): + """Data improperly aligned.""" + + +class SPSDKParsingError(SPSDKError): + """Cannot parse binary data.""" + + +class SPSDKCorruptedException(SPSDKError): + """Corrupted Exception.""" + + +class SPSDKUnsupportedOperation(SPSDKError): + """SPSDK unsupported operation error.""" + + +class SPSDKSyntaxError(SyntaxError, SPSDKError): + """SPSDK syntax error.""" + + +class SPSDKFileNotFoundError(FileNotFoundError, SPSDKError): + """SPSDK file not found error.""" + + +class SPSDKAttributeError(SPSDKError, AttributeError): + """SPSDK standard attribute error.""" + + +class SPSDKConnectionError(SPSDKError, ConnectionError): + """SPSDK standard connection error.""" + + +class SPSDKIndexError(SPSDKError, IndexError): + """SPSDK standard index error.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/__init__.py new file mode 100644 index 00000000..b74e48fb --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/__init__.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2016-2018 Martin Olejar +# Copyright 2019-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module implementing communication with the MCU Bootloader.""" +# diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/commands.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/commands.py new file mode 100644 index 00000000..063733a1 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/commands.py @@ -0,0 +1,522 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2016-2018 Martin Olejar +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Commands and responses used by MBOOT module.""" + +from struct import pack, unpack, unpack_from +from typing import Dict, List, Optional, Type + +from ..utils.interfaces.commands import CmdPacketBase, CmdResponseBase +from ..utils.spsdk_enum import SpsdkEnum +from .error_codes import StatusCode +from .exceptions import McuBootError + +######################################################################################################################## +# McuBoot Commands and Responses Tags +######################################################################################################################## + +# fmt: off +class CommandTag(SpsdkEnum): + """McuBoot Commands.""" + + NO_COMMAND = (0x00, "NoCommand", "No Command") + FLASH_ERASE_ALL = (0x01, "FlashEraseAll", "Erase Complete Flash") + FLASH_ERASE_REGION = (0x02, "FlashEraseRegion", "Erase Flash Region") + READ_MEMORY = (0x03, "ReadMemory", "Read Memory") + WRITE_MEMORY = (0x04, "WriteMemory", "Write Memory") + FILL_MEMORY = (0x05, "FillMemory", "Fill Memory") + FLASH_SECURITY_DISABLE = (0x06, "FlashSecurityDisable", "Disable Flash Security") + GET_PROPERTY = (0x07, "GetProperty", "Get Property") + RECEIVE_SB_FILE = (0x08, "ReceiveSBFile", "Receive SB File") + EXECUTE = (0x09, "Execute", "Execute") + CALL = (0x0A, "Call", "Call") + RESET = (0x0B, "Reset", "Reset MCU") + SET_PROPERTY = (0x0C, "SetProperty", "Set Property") + FLASH_ERASE_ALL_UNSECURE = (0x0D, "FlashEraseAllUnsecure", "Erase Complete Flash and Unlock") + FLASH_PROGRAM_ONCE = (0x0E, "FlashProgramOnce", "Flash Program Once") + FLASH_READ_ONCE = (0x0F, "FlashReadOnce", "Flash Read Once") + FLASH_READ_RESOURCE = (0x10, "FlashReadResource", "Flash 
Read Resource") + CONFIGURE_MEMORY = (0x11, "ConfigureMemory", "Configure Quad-SPI Memory") + RELIABLE_UPDATE = (0x12, "ReliableUpdate", "Reliable Update") + GENERATE_KEY_BLOB = (0x13, "GenerateKeyBlob", "Generate Key Blob") + FUSE_PROGRAM = (0x14, "ProgramFuse", "Program Fuse") + KEY_PROVISIONING = (0x15, "KeyProvisioning", "Key Provisioning") + TRUST_PROVISIONING = (0x16, "TrustProvisioning", "Trust Provisioning") + FUSE_READ = (0x17, "ReadFuse", "Read Fuse") + UPDATE_LIFE_CYCLE = (0x18, "UpdateLifeCycle", "Update Life Cycle") + ELE_MESSAGE = (0x19, "EleMessage", "Send EdgeLock Enclave Message") + + # reserved commands + CONFIGURE_I2C = (0xC1, "ConfigureI2c", "Configure I2C") + CONFIGURE_SPI = (0xC2, "ConfigureSpi", "Configure SPI") + CONFIGURE_CAN = (0xC3, "ConfigureCan", "Configure CAN") + + + +class CommandFlag(SpsdkEnum): + """Flags for McuBoot commands.""" + + NONE = (0, "NoFlags", "No flags specified") + HAS_DATA_PHASE = (1, "DataPhase", "Command has a data phase") + + + +class ResponseTag(SpsdkEnum): + """McuBoot Responses to Commands.""" + + GENERIC = (0xA0, "GenericResponse", "Generic Response") + READ_MEMORY = (0xA3, "ReadMemoryResponse", "Read Memory Response") + GET_PROPERTY = (0xA7, "GetPropertyResponse", "Get Property Response") + FLASH_READ_ONCE = (0xAF, "FlashReadOnceResponse", "Flash Read Once Response") + FLASH_READ_RESOURCE = (0xB0, "FlashReadResourceResponse", "Flash Read Resource Response") + KEY_BLOB_RESPONSE = (0xB3, "CreateKeyBlobResponse", "Create Key Blob") + KEY_PROVISIONING_RESPONSE = (0xB5, "KeyProvisioningResponse", "Key Provisioning Response") + TRUST_PROVISIONING_RESPONSE = (0xB6, "TrustProvisioningResponse", "Trust Provisioning Response") + + +class KeyProvOperation(SpsdkEnum): + """Type of key provisioning operation.""" + + ENROLL = (0, "Enroll", "Enroll Operation") + SET_USER_KEY = (1, "SetUserKey", "Set User Key Operation") + SET_INTRINSIC_KEY = (2, "SetIntrinsicKey", "Set Intrinsic Key Operation") + WRITE_NON_VOLATILE = (3, "WriteNonVolatile", "Write Non Volatile Operation") + READ_NON_VOLATILE = (4, "ReadNonVolatile", "Read Non Volatile Operation") + WRITE_KEY_STORE = (5, "WriteKeyStore", "Write Key Store Operation") + READ_KEY_STORE = (6, "ReadKeyStore", "Read Key Store Operation") + + +class KeyProvUserKeyType(SpsdkEnum): + """Enumeration of supported user keys in PUF. Keys are SoC specific, not all will be supported for the processor.""" + + OTFADKEK = (2, "OTFADKEK", "Key for OTFAD encryption") + SBKEK = (3, "SBKEK", "Key for SB file encryption") + PRINCE_REGION_0 = (7, "PRINCE0", "Key for Prince region 0") + PRINCE_REGION_1 = (8, "PRINCE1", "Key for Prince region 1") + PRINCE_REGION_2 = (9, "PRINCE2", "Key for Prince region 2") + PRINCE_REGION_3 = (10, "PRINCE3", "Key for Prince region 3") + + USERKEK = (11, "USERKEK", "Encrypted boot image key") + UDS = (12, "UDS", "Universal Device Secret for DICE") + + +class GenerateKeyBlobSelect(SpsdkEnum): + """Key selector for the generate-key-blob function. + + For devices with SNVS, valid options of [key_sel] are + 0, 1 or OTPMK: OTPMK from FUSE or OTP(default), + 2 or ZMK: ZMK from SNVS, + 3 or CMK: CMK from SNVS, + For devices without SNVS, this option will be ignored. 
+ """ + + OPTMK = (0, "OPTMK", "OTPMK from FUSE or OTP(default)") + ZMK = (2, "ZMK", "ZMK from SNVS") + CMK = (3, "CMK", "CMK from SNVS") + + +class TrustProvOperation(SpsdkEnum): + """Operations supported by Trust Provisioning flow.""" + + PROVE_GENUINITY = (0xF4, "ProveGenuinity", "Start the proving genuinity process") + ISP_SET_WRAPPED_DATA = (0xF0, "SetWrappedData", "Start processing Wrapped data") + """Type of trust provisioning operation.""" + + OEM_GEN_MASTER_SHARE = (0, "OemGenMasterShare", "Enroll Operation") + OEM_SET_MASTER_SHARE = (1, "SetUserKey", "Set User Key Operation") + OEM_GET_CUST_CERT_DICE_PUK = (2, "SetIntrinsicKey", "Set Intrinsic Key Operation") + HSM_GEN_KEY = (3, "HsmGenKey", "HSM gen key") + HSM_STORE_KEY = (4, "HsmStoreKey", "HSM store key") + HSM_ENC_BLOCK = (5, "HsmEncBlock", "HSM Enc block") + HSM_ENC_SIGN = (6, "HsnEncSign", "HSM enc sign") + + +class TrustProvOemKeyType(SpsdkEnum): + """Type of oem key type definition.""" + + MFWISK = (0xC3A5, "MFWISK", "ECDSA Manufacturing Firmware Signing Key") + MFWENCK = (0xA5C3, "MFWENCK", "CKDF Master Key for Manufacturing Firmware Encryption Key") + GENSIGNK = (0x5A3C, "GENSIGNK", "Generic ECDSA Signing Key") + GETCUSTMKSK = (0x3C5A, "GETCUSTMKSK", "CKDF Master Key for Production Firmware Encryption Key") + + +class TrustProvKeyType(SpsdkEnum): + """Type of key type definition.""" + + CKDFK = (1, "CKDFK", "CKDF Master Key") + HKDFK = (2, "HKDFK", "HKDF Master Key") + HMACK = (3, "HMACK", "HMAC Key") + CMACK = (4, "CMACK", "CMAC Key") + AESK = (5, "AESK", "AES Key") + KUOK = (6, "KUOK", "Key Unwrap Only Key") + + +class TrustProvWrappingKeyType(SpsdkEnum): + """Type of wrapping key type definition.""" + + INT_SK = (0x10, "INT_SK", "The wrapping key for wrapping of MFG_CUST_MK_SK0_BLOB") + EXT_SK = (0x11, "EXT_SK", "The wrapping key for wrapping of MFG_CUST_MK_SK0_BLOB") + +class TrustProvWpc(SpsdkEnum): + """Type of WPC trusted facility commands for DSC.""" + + WPC_GET_ID = (0x5000000, "wpc_get_id", "WPC get ID") + NXP_GET_ID = (0x5000001, "nxp_get_id", "NXP get ID") + WPC_INSERT_CERT = (0x5000002, "wpc_insert_cert", "WPC insert certificate") + WPC_SIGN_CSR = (0x5000003, "wpc_sign_csr", "WPC sign CSR") + +class TrustProvDevHsmDsc(SpsdkEnum): + """Type of DSC Device HSM.""" + + DSC_HSM_CREATE_SESSION = (0x6000000, "dsc_hsm_create_session", "DSC HSM create session") + DSC_HSM_ENC_BLK = (0x6000001, "dsc_hsm_enc_blk", "DSC HSM encrypt bulk") + DSC_HSM_ENC_SIGN = (0x6000002, "dsc_hsm_enc_sign", "DSC HSM sign") + +# fmt: on + +######################################################################################################################## +# McuBoot Command and Response packet classes +######################################################################################################################## + + +class CmdHeader: + """McuBoot command/response header.""" + + SIZE = 4 + + def __init__(self, tag: int, flags: int, reserved: int, params_count: int) -> None: + """Initialize the Command Header. + + :param tag: Tag indicating the command, see: `CommandTag` class + :param flags: Flags for the command + :param reserved: Reserved? 
+ :param params_count: Number of parameter for the command + """ + self.tag = tag + self.flags = flags + self.reserved = reserved + self.params_count = params_count + + def __eq__(self, obj: object) -> bool: + return isinstance(obj, CmdHeader) and vars(obj) == vars(self) + + def __ne__(self, obj: object) -> bool: + return not self.__eq__(obj) + + def __repr__(self) -> str: + return f"" + + def __str__(self) -> str: + return ( + f"CmdHeader(tag=0x{self.tag:02X}, flags=0x{self.flags:02X}, " + f"reserved={self.reserved}, params_count={self.params_count})" + ) + + def to_bytes(self) -> bytes: + """Serialize header into bytes.""" + return pack("4B", self.tag, self.flags, self.reserved, self.params_count) + + @classmethod + def from_bytes(cls, data: bytes, offset: int = 0) -> "CmdHeader": + """Deserialize header from bytes. + + :param data: Input data in bytes + :param offset: The offset of input data + :return: De-serialized CmdHeader object + :raises McuBootError: Invalid data format + """ + if len(data) < 4: + raise McuBootError( + f"Invalid format of RX packet (data length is {len(data)} bytes)" + ) + return cls(*unpack_from("4B", data, offset)) + + +class CmdPacket(CmdPacketBase): + """McuBoot command packet format class.""" + + SIZE = 32 + EMPTY_VALUE = 0x00 + + def __init__( + self, tag: CommandTag, flags: int, *args: int, data: Optional[bytes] = None + ) -> None: + """Initialize the Command Packet object. + + :param tag: Tag identifying the command + :param flags: Flags used by the command + :param args: Arguments used by the command + :param data: Additional data, defaults to None + """ + self.header = CmdHeader(tag.tag, flags, 0, len(args)) + self.params = list(args) + if data is not None: + if len(data) % 4: + data += b"\0" * (4 - len(data) % 4) + self.params.extend(unpack_from(f"<{len(data) // 4}I", data)) + self.header.params_count = len(self.params) + + def __eq__(self, obj: object) -> bool: + return isinstance(obj, CmdPacket) and vars(obj) == vars(self) + + def __ne__(self, obj: object) -> bool: + return not self.__eq__(obj) + + def __str__(self) -> str: + """Get object info.""" + tag = ( + CommandTag.get_label(self.header.tag) + if self.header.tag in CommandTag.tags() + else f"0x{self.header.tag:02X}" + ) + return f"Tag={tag}, Flags=0x{self.header.flags:02X}" + "".join( + f", P[{n}]=0x{param:08X}" for n, param in enumerate(self.params) + ) + + def to_bytes(self, padding: bool = True) -> bytes: + """Serialize CmdPacket into bytes. + + :param padding: If True, add padding to specific size + :return: Serialized object into bytes + """ + self.header.params_count = len(self.params) + data = self.header.to_bytes() + data += pack(f"<{self.header.params_count}I", *self.params) + if padding and len(data) < self.SIZE: + data += bytes([self.EMPTY_VALUE] * (self.SIZE - len(data))) + return data + + +class CmdResponse(CmdResponseBase): + """McuBoot response base format class.""" + + def __init__(self, header: CmdHeader, raw_data: bytes) -> None: + """Initialize the Command Response object. 
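# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# What a command looks like on the wire: a 4-byte header (tag, flags, reserved,
# parameter count) followed by little-endian 32-bit parameters, zero-padded to the
# fixed 32-byte packet size.  Property 1 selects CurrentVersion in the mboot protocol.
pkt = CmdPacket(CommandTag.GET_PROPERTY, 0, 1)
raw = pkt.to_bytes()
assert len(raw) == CmdPacket.SIZE == 32
assert raw[:8] == bytes([0x07, 0x00, 0x00, 0x01]) + (1).to_bytes(4, "little")
# ----------------------------------------------------------------------------------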
+ + :param header: Header for the response + :param raw_data: Response data + """ + assert isinstance(header, CmdHeader) + assert isinstance(raw_data, (bytes, bytearray)) + self.header = header + self.raw_data = raw_data + (status,) = unpack_from(" int: + """Return a integer representation of the response.""" + return unpack_from(">I", self.raw_data)[0] + + def _get_status_label(self) -> str: + return ( + StatusCode.get_label(self.status) + if self.status in StatusCode.tags() + else f"Unknown[0x{self.status:08X}]" + ) + + def __eq__(self, obj: object) -> bool: + return isinstance(obj, CmdResponse) and vars(obj) == vars(self) + + def __ne__(self, obj: object) -> bool: + return not self.__eq__(obj) + + def __str__(self) -> str: + """Get object info.""" + return ( + f"Tag=0x{self.header.tag:02X}, Flags=0x{self.header.flags:02X}" + + " [" + + ", ".join(f"{b:02X}" for b in self.raw_data) + + "]" + ) + + +class GenericResponse(CmdResponse): + """McuBoot generic response format class.""" + + def __init__(self, header: CmdHeader, raw_data: bytes) -> None: + """Initialize the Generic response object. + + :param header: Header for the response + :param raw_data: Response data + """ + super().__init__(header, raw_data) + _, tag = unpack_from("<2I", raw_data) + self.cmd_tag: int = tag + + def __str__(self) -> str: + """Get object info.""" + tag = ResponseTag.get_label(self.header.tag) + status = self._get_status_label() + cmd = ( + CommandTag.get_label(self.cmd_tag) + if self.cmd_tag in CommandTag.tags() + else f"Unknown[0x{self.cmd_tag:02X}]" + ) + return f"Tag={tag}, Status={status}, Cmd={cmd}" + + +class GetPropertyResponse(CmdResponse): + """McuBoot get property response format class.""" + + def __init__(self, header: CmdHeader, raw_data: bytes) -> None: + """Initialize the Get-Property response object. + + :param header: Header for the response + :param raw_data: Response data + """ + super().__init__(header, raw_data) + _, *values = unpack_from(f"<{self.header.params_count}I", raw_data) + self.values: List[int] = list(values) + + def __str__(self) -> str: + """Get object info.""" + tag = ResponseTag.get_label(self.header.tag) + status = self._get_status_label() + return f"Tag={tag}, Status={status}" + "".join( + f", v{n}=0x{value:08X}" for n, value in enumerate(self.values) + ) + + +class ReadMemoryResponse(CmdResponse): + """McuBoot read memory response format class.""" + + def __init__(self, header: CmdHeader, raw_data: bytes) -> None: + """Initialize the Read-Memory response object. + + :param header: Header for the response + :param raw_data: Response data + """ + super().__init__(header, raw_data) + _, length = unpack_from("<2I", raw_data) + self.length: int = length + + def __str__(self) -> str: + """Get object info.""" + tag = ResponseTag.get_label(self.header.tag) + status = self._get_status_label() + return f"Tag={tag}, Status={status}, Length={self.length}" + + +class FlashReadOnceResponse(CmdResponse): + """McuBoot flash read once response format class.""" + + def __init__(self, header: CmdHeader, raw_data: bytes) -> None: + """Initialize the Flash-Read-Once response object. 
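# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# A successful generic response decoded from raw bytes: the payload carries the
# status word first, then the tag of the acknowledged command (both little-endian
# 32-bit words; struct.pack is already imported at the top of this module).
hdr = CmdHeader(tag=ResponseTag.GENERIC.tag, flags=0, reserved=0, params_count=2)
payload = pack("<2I", StatusCode.SUCCESS.tag, CommandTag.RESET.tag)
rsp = GenericResponse(hdr, payload)
assert rsp.status == StatusCode.SUCCESS.tag and rsp.cmd_tag == CommandTag.RESET.tag
# ----------------------------------------------------------------------------------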
+ + :param header: Header for the response + :param raw_data: Response data + """ + super().__init__(header, raw_data) + _, length, *values = unpack_from(f"<{self.header.params_count}I", raw_data) + self.length: int = length + self.values: List[int] = list(values) + self.data = raw_data[8 : 8 + self.length] if self.length > 0 else b"" + + def __str__(self) -> str: + """Get object info.""" + tag = ResponseTag.get_label(self.header.tag) + status = self._get_status_label() + return f"Tag={tag}, Status={status}, Length={self.length}" + + +class FlashReadResourceResponse(CmdResponse): + """McuBoot flash read resource response format class.""" + + def __init__(self, header: CmdHeader, raw_data: bytes) -> None: + """Initialize the Flash-Read-Resource response object. + + :param header: Header for the response + :param raw_data: Response data + """ + super().__init__(header, raw_data) + _, length = unpack_from("<2I", raw_data) + self.length: int = length + + def __str__(self) -> str: + """Get object info.""" + tag = ResponseTag.get_label(self.header.tag) + status = self._get_status_label() + return f"Tag={tag}, Status={status}, Length={self.length}" + + +class KeyProvisioningResponse(CmdResponse): + """McuBoot Key Provisioning response format class.""" + + def __init__(self, header: CmdHeader, raw_data: bytes) -> None: + """Initialize the Key-Provisioning response object. + + :param header: Header for the response + :param raw_data: Response data + """ + super().__init__(header, raw_data) + _, length = unpack_from("<2I", raw_data) + self.length: int = length + + def __str__(self) -> str: + """Get object info.""" + tag = ResponseTag.get_label(self.header.tag) + status = self._get_status_label() + return f"Tag={tag}, Status={status}, Length={self.length}" + + +class TrustProvisioningResponse(CmdResponse): + """McuBoot Trust Provisioning response format class.""" + + def __init__(self, header: CmdHeader, raw_data: bytes) -> None: + """Initialize the Trust-Provisioning response object. + + :param header: Header for the response + :param raw_data: Response data + """ + super().__init__(header, raw_data) + _, *values = unpack(f"<{self.header.params_count}I", raw_data) + self.values: List[int] = list(values) + + def __str__(self) -> str: + """Get object info.""" + tag = ResponseTag.get_label(self.header.tag) + status = self._get_status_label() + return f"Tag={tag}, Status={status}" + + +class NoResponse(CmdResponse): + """Special internal case when no response is provided by the target.""" + + def __init__(self, cmd_tag: int) -> None: + """Create a NoResponse to an command that was issued, indicated by its tag. + + :param cmd_tag: Tag of the command that preceded the no-response from target + """ + header = CmdHeader(tag=cmd_tag, flags=0, reserved=0, params_count=0) + raw_data = pack(" CmdResponse: + """Parse command response. 
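# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# FlashReadOnceResponse carries the read bytes inline after the status and length
# words, which is what the data slice above extracts.
hdr = CmdHeader(tag=ResponseTag.FLASH_READ_ONCE.tag, flags=0, reserved=0, params_count=3)
payload = pack("<3I", StatusCode.SUCCESS.tag, 4, 0xDEADBEEF)
rsp = FlashReadOnceResponse(hdr, payload)
assert rsp.length == 4 and rsp.data == (0xDEADBEEF).to_bytes(4, "little")
# ----------------------------------------------------------------------------------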
+ + :param data: Input data in bytes + :param offset: The offset of input data + :return: De-serialized object from data + """ + known_response: Dict[int, Type[CmdResponse]] = { + ResponseTag.GENERIC.tag: GenericResponse, + ResponseTag.GET_PROPERTY.tag: GetPropertyResponse, + ResponseTag.READ_MEMORY.tag: ReadMemoryResponse, + ResponseTag.FLASH_READ_RESOURCE.tag: FlashReadResourceResponse, + ResponseTag.FLASH_READ_ONCE.tag: FlashReadOnceResponse, + ResponseTag.KEY_BLOB_RESPONSE.tag: ReadMemoryResponse, + ResponseTag.KEY_PROVISIONING_RESPONSE.tag: KeyProvisioningResponse, + ResponseTag.TRUST_PROVISIONING_RESPONSE.tag: TrustProvisioningResponse, + } + header = CmdHeader.from_bytes(data, offset) + if header.tag in known_response: + return known_response[header.tag](header, data[CmdHeader.SIZE :]) + + return CmdResponse(header, data[CmdHeader.SIZE :]) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/error_codes.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/error_codes.py new file mode 100644 index 00000000..2b2325f3 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/error_codes.py @@ -0,0 +1,352 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2016-2018 Martin Olejar +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Status and error codes used by the MBoot protocol.""" + +from ..utils.spsdk_enum import SpsdkEnum + +######################################################################################################################## +# McuBoot Status Codes (Errors) +######################################################################################################################## + +# pylint: disable=line-too-long +# fmt: off +class StatusCode(SpsdkEnum): + """McuBoot status codes.""" + + SUCCESS = (0, "Success", "Success") + FAIL = (1, "Fail", "Fail") + READ_ONLY = (2, "ReadOnly", "Read Only Error") + OUT_OF_RANGE = (3, "OutOfRange", "Out Of Range Error") + INVALID_ARGUMENT = (4, "InvalidArgument", "Invalid Argument Error") + TIMEOUT = (5, "TimeoutError", "Timeout Error") + NO_TRANSFER_IN_PROGRESS = (6, "NoTransferInProgress", "No Transfer In Progress Error") + + # Flash driver errors. 
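# --- [editor] illustrative sketch, not part of the vendored SPSDK diff ------------
# The status codes follow the same tag/label pattern as the command enums, which
# is how higher layers turn a numeric mboot status into a readable message.
assert StatusCode.SUCCESS.tag == 0
assert StatusCode.get_label(5) == "TimeoutError"
# ----------------------------------------------------------------------------------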
+ FLASH_SIZE_ERROR = (100, "FlashSizeError", "FLASH Driver: Size Error") + FLASH_ALIGNMENT_ERROR = (101, "FlashAlignmentError", "FLASH Driver: Alignment Error") + FLASH_ADDRESS_ERROR = (102, "FlashAddressError", "FLASH Driver: Address Error") + FLASH_ACCESS_ERROR = (103, "FlashAccessError", "FLASH Driver: Access Error") + FLASH_PROTECTION_VIOLATION = (104, "FlashProtectionViolation", "FLASH Driver: Protection Violation") + FLASH_COMMAND_FAILURE = (105, "FlashCommandFailure", "FLASH Driver: Command Failure") + FLASH_UNKNOWN_PROPERTY = (106, "FlashUnknownProperty", "FLASH Driver: Unknown Property") + FLASH_ERASE_KEY_ERROR = (107, "FlashEraseKeyError", "FLASH Driver: Provided Key Does Not Match Programmed Flash Memory Key") + FLASH_REGION_EXECUTE_ONLY = (108, "FlashRegionExecuteOnly", "FLASH Driver: Region Execute Only") + FLASH_EXEC_IN_RAM_NOT_READY = (109, "FlashExecuteInRamFunctionNotReady", "FLASH Driver: Execute In RAM Function Not Ready") + FLASH_COMMAND_NOT_SUPPORTED = (111, "FlashCommandNotSupported", "FLASH Driver: Command Not Supported") + FLASH_READ_ONLY_PROPERTY = (112, "FlashReadOnlyProperty", "FLASH Driver: Flash Memory Property Is Read-Only") + FLASH_INVALID_PROPERTY_VALUE = (113, "FlashInvalidPropertyValue", "FLASH Driver: Flash Memory Property Value Out Of Range") + FLASH_INVALID_SPECULATION_OPTION = (114, "FlashInvalidSpeculationOption", "FLASH Driver: Flash Memory Prefetch Speculation Option Is Invalid") + FLASH_ECC_ERROR = (116, "FlashEccError", "FLASH Driver: ECC Error") + FLASH_COMPARE_ERROR = (117, "FlashCompareError", "FLASH Driver: Destination And Source Memory Contents Do Not Match") + FLASH_REGULATION_LOSS = (118, "FlashRegulationLoss", "FLASH Driver: Loss Of Regulation During Read") + FLASH_INVALID_WAIT_STATE_CYCLES = (119, "FlashInvalidWaitStateCycles", "FLASH Driver: Wait State Cycle Set To Read/Write Mode Is Invalid") + FLASH_OUT_OF_DATE_CFPA_PAGE = (132, "FlashOutOfDateCfpaPage", "FLASH Driver: Out Of Date CFPA Page") + FLASH_BLANK_IFR_PAGE_DATA = (133, "FlashBlankIfrPageData", "FLASH Driver: Blank IFR Page Data") + FLASH_ENCRYPTED_REGIONS_ERASE_NOT_DONE_AT_ONCE = (134, "FlashEncryptedRegionsEraseNotDoneAtOnce", "FLASH Driver: Encrypted Regions Erase Not Done At Once") + FLASH_PROGRAM_VERIFICATION_NOT_ALLOWED = (135, "FlashProgramVerificationNotAllowed", "FLASH Driver: Program Verification Not Allowed") + FLASH_HASH_CHECK_ERROR = (136, "FlashHashCheckError", "FLASH Driver: Hash Check Error") + FLASH_SEALED_PFR_REGION = (137, "FlashSealedPfrRegion", "FLASH Driver: Sealed PFR Region") + FLASH_PFR_REGION_WRITE_BROKEN = (138, "FlashPfrRegionWriteBroken", "FLASH Driver: PFR Region Write Broken") + FLASH_NMPA_UPDATE_NOT_ALLOWED = (139, "FlashNmpaUpdateNotAllowed", "FLASH Driver: NMPA Update Not Allowed") + FLASH_CMPA_CFG_DIRECT_ERASE_NOT_ALLOWED = (140, "FlashCmpaCfgDirectEraseNotAllowed", "FLASH Driver: CMPA Cfg Direct Erase Not Allowed") + FLASH_PFR_BANK_IS_LOCKED = (141, "FlashPfrBankIsLocked", "FLASH Driver: PFR Bank Is Locked") + FLASH_CFPA_SCRATCH_PAGE_INVALID = (148, "FlashCfpaScratchPageInvalid", "FLASH Driver: CFPA Scratch Page Invalid") + FLASH_CFPA_VERSION_ROLLBACK_DISALLOWED = (149, "FlashCfpaVersionRollbackDisallowed", "FLASH Driver: CFPA Version Rollback Disallowed") + FLASH_READ_HIDING_AREA_DISALLOWED = (150, "FlashReadHidingAreaDisallowed", "FLASH Driver: Flash Memory Hiding Read Not Allowed") + FLASH_MODIFY_PROTECTED_AREA_DISALLOWED = (151, "FlashModifyProtectedAreaDisallowed", "FLASH Driver: Flash Firewall Page Locked Erase And Program Are Not 
Allowed") + FLASH_COMMAND_OPERATION_IN_PROGRESS = (152, "FlashCommandOperationInProgress", "FLASH Driver: Flash Memory State Busy Flash Memory Command Is In Progress") + + # I2C driver errors. + I2C_SLAVE_TX_UNDERRUN = (200, "I2cSlaveTxUnderrun", "I2C Driver: Slave Tx Underrun") + I2C_SLAVE_RX_OVERRUN = (201, "I2cSlaveRxOverrun", "I2C Driver: Slave Rx Overrun") + I2C_ARBITRATION_LOST = (202, "I2cArbitrationLost", "I2C Driver: Arbitration Lost") + + # SPI driver errors. + SPI_SLAVE_TX_UNDERRUN = (300, "SpiSlaveTxUnderrun", "SPI Driver: Slave Tx Underrun") + SPI_SLAVE_RX_OVERRUN = (301, "SpiSlaveRxOverrun", "SPI Driver: Slave Rx Overrun") + + # QuadSPI driver errors. + QSPI_FLASH_SIZE_ERROR = (400, "QspiFlashSizeError", "QSPI Driver: Flash Size Error") + QSPI_FLASH_ALIGNMENT_ERROR = (401, "QspiFlashAlignmentError", "QSPI Driver: Flash Alignment Error") + QSPI_FLASH_ADDRESS_ERROR = (402, "QspiFlashAddressError", "QSPI Driver: Flash Address Error") + QSPI_FLASH_COMMAND_FAILURE = (403, "QspiFlashCommandFailure", "QSPI Driver: Flash Command Failure") + QSPI_FLASH_UNKNOWN_PROPERTY = (404, "QspiFlashUnknownProperty", "QSPI Driver: Flash Unknown Property") + QSPI_NOT_CONFIGURED = (405, "QspiNotConfigured", "QSPI Driver: Not Configured") + QSPI_COMMAND_NOT_SUPPORTED = (406, "QspiCommandNotSupported", "QSPI Driver: Command Not Supported") + QSPI_COMMAND_TIMEOUT = (407, "QspiCommandTimeout", "QSPI Driver: Command Timeout") + QSPI_WRITE_FAILURE = (408, "QspiWriteFailure", "QSPI Driver: Write Failure") + + # OTFAD driver errors. + OTFAD_SECURITY_VIOLATION = (500, "OtfadSecurityViolation", "OTFAD Driver: Security Violation") + OTFAD_LOGICALLY_DISABLED = (501, "OtfadLogicallyDisabled", "OTFAD Driver: Logically Disabled") + OTFAD_INVALID_KEY = (502, "OtfadInvalidKey", "OTFAD Driver: Invalid Key") + OTFAD_INVALID_KEY_BLOB = (503, "OtfadInvalidKeyBlob", "OTFAD Driver: Invalid Key Blob") + + # Sending errors. + SENDING_OPERATION_CONDITION_ERROR = (1812, "SendOperationConditionError", "Send Operation Condition failed") + + # SDMMC driver errors. + + # FlexSPI statuses. + FLEXSPI_SEQUENCE_EXECUTION_TIMEOUT_RT5xx = (6000, "FLEXSPI_SequenceExecutionTimeout_RT5xx", "FLEXSPI: Sequence Execution Timeout") + FLEXSPI_INVALID_SEQUENCE_RT5xx = (6001, "FLEXSPI_InvalidSequence_RT5xx", "FLEXSPI: Invalid Sequence") + FLEXSPI_DEVICE_TIMEOUT_RT5xx = (6002, "FLEXSPI_DeviceTimeout_RT5xx", "FLEXSPI: Device Timeout") + FLEXSPI_SEQUENCE_EXECUTION_TIMEOUT = (7000, "FLEXSPI_SequenceExecutionTimeout", "FLEXSPI: Sequence Execution Timeout") + FLEXSPI_INVALID_SEQUENCE = (7001, "FLEXSPI_InvalidSequence", "FLEXSPI: Invalid Sequence") + FLEXSPI_DEVICE_TIMEOUT = (7002, "FLEXSPI_DeviceTimeout", "FLEXSPI: Device Timeout") + + # Bootloader errors. + UNKNOWN_COMMAND = (10000, "UnknownCommand", "Unknown Command") + SECURITY_VIOLATION = (10001, "SecurityViolation", "Security Violation") + ABORT_DATA_PHASE = (10002, "AbortDataPhase", "Abort Data Phase") + PING_ERROR = (10003, "PingError", "Ping Error") + NO_RESPONSE = (10004, "NoResponse", "No response packet from target device") + NO_RESPONSE_EXPECTED = (10005, "NoResponseExpected", "No Response Expected") + UNSUPPORTED_COMMAND = (10006, "UnsupportedCommand", "Unsupported Command") + + # SB loader errors. 
+ ROMLDR_SECTION_OVERRUN = (10100, "RomLdrSectionOverrun", "ROM Loader: Section Overrun") + ROMLDR_SIGNATURE = (10101, "RomLdrSignature", "ROM Loader: Signature Error") + ROMLDR_SECTION_LENGTH = (10102, "RomLdrSectionLength", "ROM Loader: Section Length Error") + ROMLDR_UNENCRYPTED_ONLY = (10103, "RomLdrUnencryptedOnly", "ROM Loader: Unencrypted Only") + ROMLDR_EOF_REACHED = (10104, "RomLdrEOFReached", "ROM Loader: EOF Reached") + ROMLDR_CHECKSUM = (10105, "RomLdrChecksum", "ROM Loader: Checksum Error") + ROMLDR_CRC32_ERROR = (10106, "RomLdrCrc32Error", "ROM Loader: CRC32 Error") + ROMLDR_UNKNOWN_COMMAND = (10107, "RomLdrUnknownCommand", "ROM Loader: Unknown Command") + ROMLDR_ID_NOT_FOUND = (10108, "RomLdrIdNotFound", "ROM Loader: ID Not Found") + ROMLDR_DATA_UNDERRUN = (10109, "RomLdrDataUnderrun", "ROM Loader: Data Underrun") + ROMLDR_JUMP_RETURNED = (10110, "RomLdrJumpReturned", "ROM Loader: Jump Returned") + ROMLDR_CALL_FAILED = (10111, "RomLdrCallFailed", "ROM Loader: Call Failed") + ROMLDR_KEY_NOT_FOUND = (10112, "RomLdrKeyNotFound", "ROM Loader: Key Not Found") + ROMLDR_SECURE_ONLY = (10113, "RomLdrSecureOnly", "ROM Loader: Secure Only") + ROMLDR_RESET_RETURNED = (10114, "RomLdrResetReturned", "ROM Loader: Reset Returned") + ROMLDR_ROLLBACK_BLOCKED = (10115, "RomLdrRollbackBlocked", "ROM Loader: Rollback Blocked") + ROMLDR_INVALID_SECTION_MAC_COUNT = (10116, "RomLdrInvalidSectionMacCount", "ROM Loader: Invalid Section Mac Count") + ROMLDR_UNEXPECTED_COMMAND = (10117, "RomLdrUnexpectedCommand", "ROM Loader: Unexpected Command") + ROMLDR_BAD_SBKEK = (10118, "RomLdrBadSBKEK", "ROM Loader: Bad SBKEK Detected") + ROMLDR_PENDING_JUMP_COMMAND = (10119, "RomLdrPendingJumpCommand", "ROM Loader: Pending Jump Command") + + # Memory interface errors. + MEMORY_RANGE_INVALID = (10200, "MemoryRangeInvalid", "Memory Range Invalid") + MEMORY_READ_FAILED = (10201, "MemoryReadFailed", "Memory Read Failed") + MEMORY_WRITE_FAILED = (10202, "MemoryWriteFailed", "Memory Write Failed") + MEMORY_CUMULATIVE_WRITE = (10203, "MemoryCumulativeWrite", "Memory Cumulative Write") + MEMORY_APP_OVERLAP_WITH_EXECUTE_ONLY_REGION = (10204, "MemoryAppOverlapWithExecuteOnlyRegion", "Memory App Overlap with exec region") + MEMORY_NOT_CONFIGURED = (10205, "MemoryNotConfigured", "Memory Not Configured") + MEMORY_ALIGNMENT_ERROR = (10206, "MemoryAlignmentError", "Memory Alignment Error") + MEMORY_VERIFY_FAILED = (10207, "MemoryVerifyFailed", "Memory Verify Failed") + MEMORY_WRITE_PROTECTED = (10208, "MemoryWriteProtected", "Memory Write Protected") + MEMORY_ADDRESS_ERROR = (10209, "MemoryAddressError", "Memory Address Error") + MEMORY_BLANK_CHECK_FAILED = (10210, "MemoryBlankCheckFailed", "Memory Black Check Failed") + MEMORY_BLANK_PAGE_READ_DISALLOWED = (10211, "MemoryBlankPageReadDisallowed", "Memory Blank Page Read Disallowed") + MEMORY_PROTECTED_PAGE_READ_DISALLOWED = (10212, "MemoryProtectedPageReadDisallowed", "Memory Protected Page Read Disallowed") + MEMORY_PFR_SPEC_REGION_WRITE_BROKEN = (10213, "MemoryPfrSpecRegionWriteBroken", "Memory PFR Spec Region Write Broken") + MEMORY_UNSUPPORTED_COMMAND = (10214, "MemoryUnsupportedCommand", "Memory Unsupported Command") + + # Property store errors. + UNKNOWN_PROPERTY = (10300, "UnknownProperty", "Unknown Property") + READ_ONLY_PROPERTY = (10301, "ReadOnlyProperty", "Read Only Property") + INVALID_PROPERTY_VALUE = (10302, "InvalidPropertyValue", "Invalid Property Value") + + # Property store errors. 
+ APP_CRC_CHECK_PASSED = (10400, "AppCrcCheckPassed", "Application CRC Check: Passed") + APP_CRC_CHECK_FAILED = (10401, "AppCrcCheckFailed", "Application: CRC Check: Failed") + APP_CRC_CHECK_INACTIVE = (10402, "AppCrcCheckInactive", "Application CRC Check: Inactive") + APP_CRC_CHECK_INVALID = (10403, "AppCrcCheckInvalid", "Application CRC Check: Invalid") + APP_CRC_CHECK_OUT_OF_RANGE = (10404, "AppCrcCheckOutOfRange", "Application CRC Check: Out Of Range") + + # Packetizer errors. + PACKETIZER_NO_PING_RESPONSE = (10500, "NoPingResponse", "Packetizer Error: No Ping Response") + PACKETIZER_INVALID_PACKET_TYPE = (10501, "InvalidPacketType", "Packetizer Error: No response received for ping command") + PACKETIZER_INVALID_CRC = (10502, "InvalidCRC", "Packetizer Error: Invalid packet type") + PACKETIZER_NO_COMMAND_RESPONSE = (10503, "NoCommandResponse", "Packetizer Error: No response received for command") + + # Reliable Update statuses. + RELIABLE_UPDATE_SUCCESS = (10600, "ReliableUpdateSuccess", "Reliable Update: Success") + RELIABLE_UPDATE_FAIL = (10601, "ReliableUpdateFail", "Reliable Update: Fail") + RELIABLE_UPDATE_INACTIVE = (10602, "ReliableUpdateInactive", "Reliable Update: Inactive") + RELIABLE_UPDATE_BACKUPAPPLICATIONINVALID = (10603, "ReliableUpdateBackupApplicationInvalid", "Reliable Update: Backup Application Invalid") + RELIABLE_UPDATE_STILLINMAINAPPLICATION = (10604, "ReliableUpdateStillInMainApplication", "Reliable Update: Still In Main Application") + RELIABLE_UPDATE_SWAPSYSTEMNOTREADY = (10605, "ReliableUpdateSwapSystemNotReady", "Reliable Update: Swap System Not Ready") + RELIABLE_UPDATE_BACKUPBOOTLOADERNOTREADY = (10606, "ReliableUpdateBackupBootloaderNotReady", "Reliable Update: Backup Bootloader Not Ready") + RELIABLE_UPDATE_SWAPINDICATORADDRESSINVALID = (10607, "ReliableUpdateSwapIndicatorAddressInvalid", "Reliable Update: Swap Indicator Address Invalid") + RELIABLE_UPDATE_SWAPSYSTEMNOTAVAILABLE = (10608, "ReliableUpdateSwapSystemNotAvailable", "Reliable Update: Swap System Not Available") + RELIABLE_UPDATE_SWAPTEST = (10609, "ReliableUpdateSwapTest", "Reliable Update: Swap Test") + + # Serial NOR/EEPROM statuses. + SERIAL_NOR_EEPROM_ADDRESS_INVALID = (10700, "SerialNorEepromAddressInvalid", "SerialNorEeprom: Address Invalid") + SERIAL_NOR_EEPROM_TRANSFER_ERROR = (10701, "SerialNorEepromTransferError", "SerialNorEeprom: Transfer Error") + SERIAL_NOR_EEPROM_TYPE_INVALID = (10702, "SerialNorEepromTypeInvalid", "SerialNorEeprom: Type Invalid") + SERIAL_NOR_EEPROM_SIZE_INVALID = (10703, "SerialNorEepromSizeInvalid", "SerialNorEeprom: Size Invalid") + SERIAL_NOR_EEPROM_COMMAND_INVALID = (10704, "SerialNorEepromCommandInvalid", "SerialNorEeprom: Command Invalid") + + # ROM API statuses. + ROM_API_NEED_MORE_DATA = (10800, "RomApiNeedMoreData", "RomApi: Need More Data") + ROM_API_BUFFER_SIZE_NOT_ENOUGH = (10801, "RomApiBufferSizeNotEnough", "RomApi: Buffer Size Not Enough") + ROM_API_INVALID_BUFFER = (10802, "RomApiInvalidBuffer", "RomApi: Invalid Buffer") + + # FlexSPI NAND statuses. 
+ FLEXSPINAND_READ_PAGE_FAIL = (20000, "FlexSPINANDReadPageFail", "FlexSPINAND: Read Page Fail") + FLEXSPINAND_READ_CACHE_FAIL = (20001, "FlexSPINANDReadCacheFail", "FlexSPINAND: Read Cache Fail") + FLEXSPINAND_ECC_CHECK_FAIL = (20002, "FlexSPINANDEccCheckFail", "FlexSPINAND: Ecc Check Fail") + FLEXSPINAND_PAGE_LOAD_FAIL = (20003, "FlexSPINANDPageLoadFail", "FlexSPINAND: Page Load Fail") + FLEXSPINAND_PAGE_EXECUTE_FAIL = (20004, "FlexSPINANDPageExecuteFail", "FlexSPINAND: Page Execute Fail") + FLEXSPINAND_ERASE_BLOCK_FAIL = (20005, "FlexSPINANDEraseBlockFail", "FlexSPINAND: Erase Block Fail") + FLEXSPINAND_WAIT_TIMEOUT = (20006, "FlexSPINANDWaitTimeout", "FlexSPINAND: Wait Timeout") + FlexSPINAND_NOT_SUPPORTED = (20007, "SPINANDPageSizeOverTheMaxSupportedSize", "SPI NAND: PageSize over the max supported size") + FlexSPINAND_FCB_UPDATE_FAIL = (20008, "FailedToUpdateFlashConfigBlockToSPINAND", "SPI NAND: Failed to update Flash config block to SPI NAND") + FlexSPINAND_DBBT_UPDATE_FAIL = (20009, "Failed to update discovered bad block table to SPI NAND", "SPI NAND: Failed to update discovered bad block table to SPI NAND") + FLEXSPINAND_WRITEALIGNMENTERROR = (20010, "FlexSPINANDWriteAlignmentError", "FlexSPINAND: Write Alignment Error") + FLEXSPINAND_NOT_FOUND = (20011, "FlexSPINANDNotFound", "FlexSPINAND: Not Found") + + # FlexSPI NOR statuses. + FLEXSPINOR_PROGRAM_FAIL = (20100, "FLEXSPINORProgramFail", "FLEXSPINOR: Program Fail") + FLEXSPINOR_ERASE_SECTOR_FAIL = (20101, "FLEXSPINOREraseSectorFail", "FLEXSPINOR: Erase Sector Fail") + FLEXSPINOR_ERASE_ALL_FAIL = (20102, "FLEXSPINOREraseAllFail", "FLEXSPINOR: Erase All Fail") + FLEXSPINOR_WAIT_TIMEOUT = (20103, "FLEXSPINORWaitTimeout", "FLEXSPINOR:Wait Timeout") + FLEXSPINOR_NOT_SUPPORTED = (20104, "FLEXSPINORPageSizeOverTheMaxSupportedSize", "FlexSPINOR: PageSize over the max supported size") + FLEXSPINOR_WRITE_ALIGNMENT_ERROR = (20105, "FlexSPINORWriteAlignmentError", "FlexSPINOR:Write Alignment Error") + FLEXSPINOR_COMMANDFAILURE = (20106, "FlexSPINORCommandFailure", "FlexSPINOR: Command Failure") + FLEXSPINOR_SFDP_NOTFOUND = (20107, "FlexSPINORSFDPNotFound", "FlexSPINOR: SFDP Not Found") + FLEXSPINOR_UNSUPPORTED_SFDP_VERSION = (20108, "FLEXSPINORUnsupportedSFDPVersion", "FLEXSPINOR: Unsupported SFDP Version") + FLEXSPINOR_FLASH_NOTFOUND = (20109, "FLEXSPINORFlashNotFound", "FLEXSPINOR Flash Not Found") + FLEXSPINOR_DTR_READ_DUMMYPROBEFAILED = (20110, "FLEXSPINORDTRReadDummyProbeFailed", "FLEXSPINOR: DTR Read Dummy Probe Failed") + + # OCOTP statuses. + OCOTP_READ_FAILURE = (20200, "OCOTPReadFailure", "OCOTP: Read Failure") + OCOTP_PROGRAM_FAILURE = (20201, "OCOTPProgramFailure", "OCOTP: Program Failure") + OCOTP_RELOAD_FAILURE = (20202, "OCOTPReloadFailure", "OCOTP: Reload Failure") + OCOTP_WAIT_TIMEOUT = (20203, "OCOTPWaitTimeout", "OCOTP: Wait Timeout") + + # SEMC NOR statuses. 
+ SEMCNOR_DEVICE_TIMEOUT = (21100, "SemcNOR_DeviceTimeout", "SemcNOR: Device Timeout") + SEMCNOR_INVALID_MEMORY_ADDRESS = (21101, "SemcNOR_InvalidMemoryAddress", "SemcNOR: Invalid Memory Address") + SEMCNOR_UNMATCHED_COMMAND_SET = (21102, "SemcNOR_unmatchedCommandSet", "SemcNOR: unmatched Command Set") + SEMCNOR_ADDRESS_ALIGNMENT_ERROR = (21103, "SemcNOR_AddressAlignmentError", "SemcNOR: Address Alignment Error") + SEMCNOR_INVALID_CFI_SIGNATURE = (21104, "SemcNOR_InvalidCfiSignature", "SemcNOR: Invalid Cfi Signature") + SEMCNOR_COMMAND_ERROR_NO_OP_TO_SUSPEND = (21105, "SemcNOR_CommandErrorNoOpToSuspend", "SemcNOR: Command Error No Op To Suspend") + SEMCNOR_COMMAND_ERROR_NO_INFO_AVAILABLE = (21106, "SemcNOR_CommandErrorNoInfoAvailable", "SemcNOR: Command Error No Info Available") + SEMCNOR_BLOCK_ERASE_COMMAND_FAILURE = (21107, "SemcNOR_BlockEraseCommandFailure", "SemcNOR: Block Erase Command Failure") + SEMCNOR_BUFFER_PROGRAM_COMMAND_FAILURE = (21108, "SemcNOR_BufferProgramCommandFailure", "SemcNOR: Buffer Program Command Failure") + SEMCNOR_PROGRAM_VERIFY_FAILURE = (21109, "SemcNOR_ProgramVerifyFailure", "SemcNOR: Program Verify Failure") + SEMCNOR_ERASE_VERIFY_FAILURE = (21110, "SemcNOR_EraseVerifyFailure", "SemcNOR: Erase Verify Failure") + SEMCNOR_INVALID_CFG_TAG = (21116, "SemcNOR_InvalidCfgTag", "SemcNOR: Invalid Cfg Tag") + + # SEMC NAND statuses. + SEMCNAND_DEVICE_TIMEOUT = (21200, "SemcNAND_DeviceTimeout", "SemcNAND: Device Timeout") + SEMCNAND_INVALID_MEMORY_ADDRESS = (21201, "SemcNAND_InvalidMemoryAddress", "SemcNAND: Invalid Memory Address") + SEMCNAND_NOT_EQUAL_TO_ONE_PAGE_SIZE = (21202, "SemcNAND_NotEqualToOnePageSize", "SemcNAND: Not Equal To One Page Size") + SEMCNAND_MORE_THAN_ONE_PAGE_SIZE = (21203, "SemcNAND_MoreThanOnePageSize", "SemcNAND: More Than One Page Size") + SEMCNAND_ECC_CHECK_FAIL = (21204, "SemcNAND_EccCheckFail", "SemcNAND: Ecc Check Fail") + SEMCNAND_INVALID_ONFI_PARAMETER = (21205, "SemcNAND_InvalidOnfiParameter", "SemcNAND: Invalid Onfi Parameter") + SEMCNAND_CANNOT_ENABLE_DEVICE_ECC = (21206, "SemcNAND_CannotEnableDeviceEcc", "SemcNAND: Cannot Enable Device Ecc") + SEMCNAND_SWITCH_TIMING_MODE_FAILURE = (21207, "SemcNAND_SwitchTimingModeFailure", "SemcNAND: Switch Timing Mode Failure") + SEMCNAND_PROGRAM_VERIFY_FAILURE = (21208, "SemcNAND_ProgramVerifyFailure", "SemcNAND: Program Verify Failure") + SEMCNAND_ERASE_VERIFY_FAILURE = (21209, "SemcNAND_EraseVerifyFailure", "SemcNAND: Erase Verify Failure") + SEMCNAND_INVALID_READBACK_BUFFER = (21210, "SemcNAND_InvalidReadbackBuffer", "SemcNAND: Invalid Readback Buffer") + SEMCNAND_INVALID_CFG_TAG = (21216, "SemcNAND_InvalidCfgTag", "SemcNAND: Invalid Cfg Tag") + SEMCNAND_FAIL_TO_UPDATE_FCB = (21217, "SemcNAND_FailToUpdateFcb", "SemcNAND: Fail To Update Fcb") + SEMCNAND_FAIL_TO_UPDATE_DBBT = (21218, "SemcNAND_FailToUpdateDbbt", "SemcNAND: Fail To Update Dbbt") + SEMCNAND_DISALLOW_OVERWRITE_BCB = (21219, "SemcNAND_DisallowOverwriteBcb", "SemcNAND: Disallow Overwrite Bcb") + SEMCNAND_ONLY_SUPPORT_ONFI_DEVICE = (21220, "SemcNAND_OnlySupportOnfiDevice", "SemcNAND: Only Support Onfi Device") + SEMCNAND_MORE_THAN_MAX_IMAGE_COPY = (21221, "SemcNAND_MoreThanMaxImageCopy", "SemcNAND: More Than Max Image Copy") + SEMCNAND_DISORDERED_IMAGE_COPIES = (21222, "SemcNAND_DisorderedImageCopies", "SemcNAND: Disordered Image Copies") + + # SPIFI NOR statuses. 
+ SPIFINOR_PROGRAM_FAIL = (22000, "SPIFINOR_ProgramFail", "SPIFINOR: Program Fail") + SPIFINOR_ERASE_SECTORFAIL = (22001, "SPIFINOR_EraseSectorFail", "SPIFINOR: Erase Sector Fail") + SPIFINOR_ERASE_ALL_FAIL = (22002, "SPIFINOR_EraseAllFail", "SPIFINOR: Erase All Fail") + SPIFINOR_WAIT_TIMEOUT = (22003, "SPIFINOR_WaitTimeout", "SPIFINOR: Wait Timeout") + SPIFINOR_NOT_SUPPORTED = (22004, "SPIFINOR_NotSupported", "SPIFINOR: Not Supported") + SPIFINOR_WRITE_ALIGNMENTERROR = (22005, "SPIFINOR_WriteAlignmentError", "SPIFINOR: Write Alignment Error") + SPIFINOR_COMMAND_FAILURE = (22006, "SPIFINOR_CommandFailure", "SPIFINOR: Command Failure") + SPIFINOR_SFDP_NOT_FOUND = (22007, "SPIFINOR_SFDP_NotFound", "SPIFINOR: SFDP Not Found") + + # EDGELOCK ENCLAVE statuses. + EDGELOCK_INVALID_RESPONSE = (30000, "EDGELOCK_InvalidResponse", "EDGELOCK: Invalid Response") + EDGELOCK_RESPONSE_ERROR = (30001, "EDGELOCK_ResponseError", "EDGELOCK: Response Error") + EDGELOCK_ABORT = (30002, "EDGELOCK_Abort", "EDGELOCK: Abort") + EDGELOCK_OPERATION_FAILED = (30003, "EDGELOCK_OperationFailed", "EDGELOCK: Operation Failed") + EDGELOCK_OTP_PROGRAM_FAILURE = (30004, "EDGELOCK_OTPProgramFailure", "EDGELOCK: OTP Program Failure") + EDGELOCK_OTP_LOCKED = (30005, "EDGELOCK_OTPLocked", "EDGELOCK: OTP Locked") + EDGELOCK_OTP_INVALID_IDX = (30006, "EDGELOCK_OTPInvalidIDX", "EDGELOCK: OTP Invalid IDX") + EDGELOCK_INVALID_LIFECYCLE = (30007, "EDGELOCK_InvalidLifecycle", "EDGELOCK: Invalid Lifecycle") + + # OTP statuses. + OTP_INVALID_ADDRESS = (52801, "OTP_InvalidAddress", "OTD: Invalid OTP address") + OTP_PROGRAM_FAIL = (52802, "OTP_ProgrammingFail", "OTD: Programming failed") + OTP_CRC_FAIL = (52803, "OTP_CRCFail", "OTP: CRC check failed") + OTP_ERROR = (52804, "OTP_Error", "OTP: Error happened during OTP operation") + OTP_ECC_CRC_FAIL = (52805, "OTP_EccCheckFail", "OTP: ECC check failed during OTP operation") + OTP_LOCKED = (52806, "OTP_FieldLocked", "OTP: Field is locked when programming") + OTP_TIMEOUT = (52807, "OTP_Timeout", "OTP: Operation timed out") + OTP_CRC_CHECK_PASS = (52808, "OTP_CRCCheckPass", "OTP: CRC check passed") + OTP_VERIFY_FAIL = (52009, "OPT_VerifyFail", "OTP: Failed to verify OTP write") + + # Security subsystem statuses. + SECURITY_SUBSYSTEM_ERROR = (1515890085, "SecuritySubSystemError", "Security SubSystem Error") + + # TrustProvisioning statuses. 
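Before the TrustProvisioning entries below, a minimal sketch of how this table is meant to be consumed; the `get_label`/`get_description`/`tags` helpers come from the `SpsdkEnum` base class, and `stringify_status_code` is the helper defined at the bottom of this file:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.error_codes import (
    StatusCode,
    stringify_status_code,
)


def describe(status: int) -> str:
    """Turn a raw status word from the bootloader into a readable message."""
    if status in StatusCode.tags():
        # e.g. 10101 -> "RomLdrSignature: ROM Loader: Signature Error"
        return f"{StatusCode.get_label(status)}: {StatusCode.get_description(status)}"
    return f"Unknown status 0x{status:08X}"


print(describe(StatusCode.SUCCESS.tag))
print(describe(10101))
# stringify_status_code() wraps the same lookup and additionally prints the
# numeric value in decimal and hex:
print(stringify_status_code(10101))
```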
+ TP_SUCCESS = (0, "TP_SUCCESS", "TP: SUCCESS") + TP_GENERAL_ERROR = (80000, "TP_GENERAL_ERROR", "TP: General error") + TP_CRYPTO_ERROR = (80001, "TP_CRYPTO_ERROR", "TP: Error during cryptographic operation") + TP_NULLPTR_ERROR = (80002, "TP_NULLPTR_ERROR", "TP: NULL pointer dereference or when buffer could not be allocated") + TP_ALREADYINITIALIZED = (80003, "TP_ALREADYINITIALIZED", "TP: Already initialized") + TP_BUFFERSMALL = (80004, "TP_BUFFERSMALL", "TP: Buffer is too small") + TP_ADDRESS_ERROR = (80005, "TP_ADDRESS_ERROR", "TP: Address out of allowed range or buffer could not be allocated") + TP_CONTAINERINVALID = (80006, "TP_CONTAINERINVALID", "TP: Container header or size is invalid") + TP_CONTAINERENTRYINVALID = (80007, "TP_CONTAINERENTRYINVALID", "TP: Container entry invalid") + TP_CONTAINERENTRYNOTFOUND = (80008, "TP_CONTAINERENTRYNOTFOUND", "TP: Container entry not found in container") + TP_INVALIDSTATEOPERATION = (80009, "TP_INVALIDSTATEOPERATION", "TP: Attempt to process command in disallowed state") + TP_COMMAND_ERROR = (80010, "TP_COMMAND_ERROR", "TP: ISP command arguments are invalid") + TP_PUF_ERROR = (80011, "TP_PUF_ERROR", "TP: PUF operation error") + TP_FLASH_ERROR = (80012, "TP_FLASH_ERROR", "TP: Flash erase/program/verify_erase failed") + TP_SECRETBOX_ERROR = (80013, "TP_SECRETBOX_ERROR", "TP: SBKEK or USER KEK cannot be stored in secret box") + TP_PFR_ERROR = (80014, "TP_PFR_ERROR", "TP: Protected Flash Region operation failed") + TP_VERIFICATION_ERROR = (80015, "TP_VERIFICATION_ERROR", "TP: Container signature verification failed") + TP_CFPA_ERROR = (80016, "TP_CFPA_ERROR", "TP: CFPA page cannot be stored") + TP_CMPA_ERROR = (80017, "TP_CMPA_ERROR", "TP: CMPA page cannot be stored or ROTKH or SECU registers are invalid") + TP_ADDR_OUT_OF_RANGE = (80018, "TP_ADDR_OUT_OF_RANGE", "TP: Address is out of range") + TP_CONTAINER_ADDR_ERROR = (80019, "TP_CONTAINER_ADDR_ERROR", "TP: Container address in write context is invalid or there is no memory for entry storage") + TP_CONTAINER_ADDR_UNALIGNED = (80020, "TP_CONTAINER_ADDR_UNALIGNED", "TP: Container address in read context is unaligned") + TP_CONTAINER_BUFF_SMALL = (80021, "TP_CONTAINER_BUFF_SMALL", "TP: There is not enough memory to store the container") + TP_CONTAINER_NO_ENTRY = (80022, "TP_CONTAINER_NO_ENTRY", "TP: Attempt to sign an empty container") + TP_CERT_ADDR_ERROR = (80023, "TP_CERT_ADDR_ERROR", "TP: Destination address of OEM certificate is invalid") + TP_CERT_ADDR_UNALIGNED = (80024, "TP_CERT_ADDR_UNALIGNED", "TP: Destination address of certificate is unaligned") + TP_CERT_OVERLAPPING = (80025, "TP_CERT_OVERLAPPING", "TP: OEM certificates are overlapping due to wrong destination addresses") + TP_PACKET_ERROR = (80026, "TP_PACKET_ERROR", "TP: Error during packet sending/receiving") + TP_PACKET_DATA_ERROR = (80027, "TP_PACKET_DATA_ERROR", "TP: Data in packet handle are invalid") + TP_UNKNOWN_COMMAND = (80028, "TP_UNKNOWN_COMMAND", "TP: Unknown command was received") + TP_SB3_FILE_ERROR = (80029, "TP_SB3_FILE_ERROR", "TP: Error during processing SB3 file") + # TP_CRITICAL_ERROR_START (80100) + TP_GENERAL_CRITICAL_ERROR = (80101, "TP_GENERAL_CRITICAL_ERROR", "TP: Critical error") + TP_CRYPTO_CRITICAL_ERROR = (80102, "TP_CRYPTO_CRITICAL_ERROR", "TP: Error of crypto module which prevents proper functionality") + TP_PUF_CRITICAL_ERROR = (80103, "TP_PUF_CRITICAL_ERROR", "TP: Initialization or start of the PUF periphery failed") + TP_PFR_CRITICAL_ERROR = (80104, "TP_PFR_CRITICAL_ERROR", "TP: Initialization of PFR or 
reading of activation code failed") + TP_PERIPHERAL_CRITICAL_ERROR = (80105, "TP_PERIPHERAL_CRITICAL_ERROR", "TP: Peripheral failure") + TP_PRINCE_CRITICAL_ERROR = (80106, "TP_PRINCE_CRITICAL_ERROR", "TP: Error during PRINCE encryption/decryption") + TP_SHA_CHECK_CRITICAL_ERROR = (80107, "TP_SHA_CHECK_CRITICAL_ERROR", "TP: SHA check verification failed") + + # IAP statuses. + IAP_INVALID_ARGUMENT = (100001, "IAP_InvalidArgument", "IAP: Invalid Argument Detected During API Execution") + IAP_OUT_OF_MEMORY = (100002, "IAP_OutOfMemory", "IAP: Heap Size Not Large Enough During API Execution") + IAP_READ_DISALLOWED = (100003, "IAP_ReadDisallowed ", "IAP: Read Memory Operation Disallowed During API Execution") + IAP_CUMULATIVE_WRITE = (100004, "IAP_CumulativeWrite", "IAP: Flash Memory Region To Be Programmed Is Not Empty") + IAP_ERASE_FAILUIRE = (100005, "IAP_EraseFailuire", "IAP: Erase Operation Failed") + IAP_COMMAND_NOT_SUPPORTED = (100006, "IAP_CommandNotSupported", "IAP: Specific Command Not Supported") + IAP_MEMORY_ACCESS_DISABLED = (100007, "IAP_MemoryAccessDisabled", "IAP: Memory Access Disabled") +# fmt: on + + +def stringify_status_code(status_code: int) -> str: + """Stringifies the MBoot status code.""" + return ( + f"{status_code} ({status_code:#x}) " + f"{StatusCode.get_description(status_code) if status_code in StatusCode.tags() else f'Unknown error code ({status_code})'}." + ) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/exceptions.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/exceptions.py new file mode 100644 index 00000000..ea7320df --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/exceptions.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2016-2018 Martin Olejar +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Exceptions used in the MBoot module.""" + +from ..exceptions import SPSDKError +from .error_codes import StatusCode + +######################################################################################################################## +# McuBoot Exceptions +######################################################################################################################## + + +class McuBootError(SPSDKError): + """MBoot Module: Base Exception.""" + + fmt = "MBoot: {description}" + + +class McuBootCommandError(McuBootError): + """MBoot Module: Command Exception.""" + + fmt = "MBoot: {cmd_name} interrupted -> {description}" + + def __init__(self, cmd: str, value: int) -> None: + """Initialize the Command Error exception. 
+ + :param cmd: Name of the command causing the exception + :param value: Response value causing the exception + """ + super().__init__() + self.cmd_name = cmd + self.error_value = value + self.description = ( + StatusCode.get_description(value) + if value in StatusCode.tags() + else f"Unknown Error 0x{value:08X}" + ) + + def __str__(self) -> str: + return self.fmt.format(cmd_name=self.cmd_name, description=self.description) + + +class McuBootDataAbortError(McuBootError): + """MBoot Module: Data phase aborted by sender.""" + + fmt = "Mboot: Data aborted by sender" + + +class McuBootConnectionError(McuBootError): + """MBoot Module: Connection Exception.""" + + fmt = "MBoot: Connection issue -> {description}" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/interfaces/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/interfaces/__init__.py new file mode 100644 index 00000000..eff4a84e --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/interfaces/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright (c) 2019-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module implementing the Mboot communication protocol.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/interfaces/usb.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/interfaces/usb.py new file mode 100644 index 00000000..3d104410 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/interfaces/usb.py @@ -0,0 +1,117 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2016-2018 Martin Olejar +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""USB Mboot interface implementation.""" + + +from dataclasses import dataclass +from typing import List, Optional + +from typing_extensions import Self + +from ...mboot.protocol.bulk_protocol import MbootBulkProtocol +from ...utils.interfaces.device.usb_device import UsbDevice + + +@dataclass +class ScanArgs: + """Scan arguments dataclass.""" + + device_id: str + + @classmethod + def parse(cls, params: str) -> Self: + """Parse given scanning parameters into ScanArgs class. + + :param params: Parameters as a string + """ + return cls(device_id=params.replace(",", ":")) + + +USB_DEVICES = { + # NAME | VID | PID + "MKL27": (0x15A2, 0x0073), + "LPC55": (0x1FC9, 0x0021), + "IMXRT": (0x1FC9, 0x0135), + "MXRT10": (0x15A2, 0x0073), # this is ID of flash-loader for RT101x + "MXRT20": (0x15A2, 0x0073), # this is ID of flash-loader for RT102x + "MXRT50": (0x15A2, 0x0073), # this is ID of flash-loader for RT105x + "MXRT60": (0x15A2, 0x0073), # this is ID of flash-loader for RT106x + "LPC55xx": (0x1FC9, 0x0020), + "LPC551x": (0x1FC9, 0x0022), + "RT6xx": (0x1FC9, 0x0021), + "RT5xx_A": (0x1FC9, 0x0020), + "RT5xx_B": (0x1FC9, 0x0023), + "RT5xx_C": (0x1FC9, 0x0023), + "RT5xx": (0x1FC9, 0x0023), + "RT6xxM": (0x1FC9, 0x0024), + "LPC553x": (0x1FC9, 0x0025), + "MCXN9xx": (0x1FC9, 0x014F), + "MCXA1xx": (0x1FC9, 0x0155), + "MCXN23x": (0x1FC9, 0x0158), +} + + +class MbootUSBInterface(MbootBulkProtocol): + """USB interface.""" + + identifier = "usb" + device: UsbDevice + usb_devices = USB_DEVICES + + def __init__(self, device: UsbDevice) -> None: + """Initialize the MbootUSBInterface object. 
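Stepping back to exceptions.py above: `McuBootCommandError` resolves its message text from the status table through its `fmt` template. A small usage sketch, with module paths as introduced in this PR:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.error_codes import StatusCode
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.exceptions import (
    McuBootCommandError,
    McuBootError,
)

try:
    # Simulate a command that was answered with ROM Loader: Signature Error (10101).
    raise McuBootCommandError("receive-sb-file", StatusCode.ROMLDR_SIGNATURE.tag)
except McuBootError as e:  # McuBootCommandError derives from McuBootError
    print(e)               # "MBoot: receive-sb-file interrupted -> ROM Loader: Signature Error"
    print(e.error_value)   # 10101
```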
+ + :param device: The device instance + """ + assert isinstance(device, UsbDevice) + super().__init__(device=device) + + @property + def name(self) -> str: + """Get the name of the device.""" + assert isinstance(self.device, UsbDevice) + for name, value in self.usb_devices.items(): + if value[0] == self.device.vid and value[1] == self.device.pid: + return name + return "Unknown" + + @classmethod + def scan_from_args( + cls, + params: str, + timeout: int, + extra_params: Optional[str] = None, + ) -> List[Self]: + """Scan connected USB devices. + + :param params: Params as a configuration string + :param extra_params: Extra params configuration string + :param timeout: Timeout for the scan + :return: list of matching RawHid devices + """ + scan_args = ScanArgs.parse(params=params) + devices = cls.scan(device_id=scan_args.device_id, timeout=timeout) + return devices + + @classmethod + def scan( + cls, + device_id: Optional[str] = None, + timeout: Optional[int] = None, + ) -> List[Self]: + """Scan connected USB devices. + + :param device_id: Device identifier , , device/instance path, device name are supported + :param timeout: Read/write timeout + :return: list of matching RawHid devices + """ + devices = UsbDevice.scan( + device_id=device_id, usb_devices_filter=cls.usb_devices, timeout=timeout + ) + return [cls(device) for device in devices] diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/mcuboot.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/mcuboot.py new file mode 100644 index 00000000..4bb402d6 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/mcuboot.py @@ -0,0 +1,1779 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2016-2018 Martin Olejar +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module for communication with the bootloader.""" + +import logging +import struct +import time +from types import TracebackType +from typing import Callable, Dict, List, Optional, Sequence, Type + +from ..mboot.protocol.base import MbootProtocolBase +from ..utils.interfaces.device.usb_device import UsbDevice +from .commands import ( + CmdPacket, + CmdResponse, + CommandFlag, + CommandTag, + FlashReadOnceResponse, + FlashReadResourceResponse, + GenerateKeyBlobSelect, + GenericResponse, + GetPropertyResponse, + KeyProvisioningResponse, + KeyProvOperation, + NoResponse, + ReadMemoryResponse, + TrustProvDevHsmDsc, + TrustProvisioningResponse, + TrustProvOperation, + TrustProvWpc, +) +from .error_codes import StatusCode, stringify_status_code +from .exceptions import ( + McuBootCommandError, + McuBootConnectionError, + McuBootDataAbortError, + McuBootError, + SPSDKError, +) +from .memories import ExtMemId, ExtMemRegion, FlashRegion, MemoryRegion, RamRegion +from .properties import PropertyTag, PropertyValueBase, Version, parse_property_value + +logger = logging.getLogger(__name__) + + +######################################################################################################################## +# McuBoot Class +######################################################################################################################## +class McuBoot: # pylint: disable=too-many-public-methods + """Class for communication with the bootloader.""" + + DEFAULT_MAX_PACKET_SIZE = 32 + + @property + def status_code(self) -> int: + """Return status code of the last operation.""" + return self._status_code + + @property + def status_string(self) -> str: + """Return status string.""" + return 
stringify_status_code(self._status_code) + + @property + def is_opened(self) -> bool: + """Return True if the device is open.""" + return self._interface.is_opened + + def __init__( + self, interface: MbootProtocolBase, cmd_exception: bool = False + ) -> None: + """Initialize the McuBoot object. + + :param interface: The instance of communication interface class + :param cmd_exception: True to throw McuBootCommandError on any error; + False to set status code only + Note: some operation might raise McuBootCommandError is all cases + + """ + self._cmd_exception = cmd_exception + self._status_code = StatusCode.SUCCESS.tag + self._interface = interface + self.reopen = False + self.enable_data_abort = False + self._pause_point: Optional[int] = None + + def __enter__(self) -> "McuBoot": + self.reopen = True + self.open() + return self + + def __exit__( + self, + exception_type: Optional[Type[Exception]] = None, + exception_value: Optional[Exception] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + def _process_cmd(self, cmd_packet: CmdPacket) -> CmdResponse: + """Process Command. + + :param cmd_packet: Command Packet + :return: command response derived from the CmdResponse + :raises McuBootConnectionError: Timeout Error + :raises McuBootCommandError: Error during command execution on the target + """ + if not self.is_opened: + logger.info("TX: Device not opened") + raise McuBootConnectionError("Device not opened") + + logger.debug(f"TX-PACKET: {str(cmd_packet)}") + + try: + self._interface.write_command(cmd_packet) + response = self._interface.read() + except TimeoutError: + self._status_code = StatusCode.NO_RESPONSE.tag + logger.debug("RX-PACKET: No Response, Timeout Error !") + response = NoResponse(cmd_tag=cmd_packet.header.tag) + + assert isinstance(response, CmdResponse) + logger.debug(f"RX-PACKET: {str(response)}") + self._status_code = response.status + + if self._cmd_exception and self._status_code != StatusCode.SUCCESS: + raise McuBootCommandError( + CommandTag.get_label(cmd_packet.header.tag), response.status + ) + logger.info(f"CMD: Status: {self.status_string}") + return response + + def _read_data( + self, + cmd_tag: CommandTag, + length: int, + progress_callback: Optional[Callable[[int, int], None]] = None, + ) -> bytes: + """Read data from device. + + :param cmd_tag: Tag indicating the read command. 
+ :param length: Length of data to read + :param progress_callback: Callback for updating the caller about the progress + :raises McuBootConnectionError: Timeout error or a problem opening the interface + :raises McuBootCommandError: Error during command execution on the target + :return: Data read from the device + """ + data = b"" + + if not self.is_opened: + logger.error("RX: Device not opened") + raise McuBootConnectionError("Device not opened") + while True: + try: + response = self._interface.read() + except McuBootDataAbortError as e: + logger.error(f"RX: {e}") + logger.info("Try increasing the timeout value") + response = self._interface.read() + except TimeoutError: + self._status_code = StatusCode.NO_RESPONSE.tag + logger.error("RX: No Response, Timeout Error !") + response = NoResponse(cmd_tag=cmd_tag.tag) + break + + if isinstance(response, bytes): + data += response + if progress_callback: + progress_callback(len(data), length) + + elif isinstance(response, GenericResponse): + logger.debug(f"RX-PACKET: {str(response)}") + self._status_code = response.status + if response.cmd_tag == cmd_tag: + break + + if len(data) < length or self.status_code != StatusCode.SUCCESS: + status_info = ( + StatusCode.get_label(self._status_code) + if self._status_code in StatusCode.tags() + else f"0x{self._status_code:08X}" + ) + logger.debug( + f"CMD: Received {len(data)} from {length} Bytes, {status_info}" + ) + if self._cmd_exception: + assert isinstance(response, CmdResponse) + raise McuBootCommandError(cmd_tag.label, response.status) + else: + logger.info(f"CMD: Successfully Received {len(data)} from {length} Bytes") + + return data[:length] if len(data) > length else data + + def _send_data( + self, + cmd_tag: CommandTag, + data: List[bytes], + progress_callback: Optional[Callable[[int, int], None]] = None, + ) -> bool: + """Send Data part of specific command. 
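For context on how these internals are reached from the outside: `MbootUSBInterface` above enumerates bootloader devices via the VID/PID table, and `McuBoot` is used as a context manager that opens and closes the interface. A minimal connection sketch, assuming a matching device is attached and this PR's package layout is importable:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.interfaces.usb import MbootUSBInterface
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.mcuboot import McuBoot

# Enumerate HID devices matching the default USB_DEVICES VID/PID table.
interfaces = MbootUSBInterface.scan()
if not interfaces:
    raise SystemExit("no bootloader device found")

interface = interfaces[0]
print(f"found: {interface.name}")

# __enter__ opens the interface, __exit__ closes it again.
with McuBoot(interface, cmd_exception=True) as mboot:
    # Every command updates status_code/status_string; cmd_exception=True makes
    # failed commands raise McuBootCommandError instead of only setting them.
    print(mboot.status_string)
```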
+ + :param cmd_tag: Tag indicating the command + :param data: List of data chunks to send + :param progress_callback: Callback for updating the caller about the progress + :raises McuBootConnectionError: Timeout error + :raises McuBootCommandError: Error during command execution on the target + :return: True if the operation is successful + """ + if not self.is_opened: + logger.info("TX: Device Disconnected") + raise McuBootConnectionError("Device Disconnected !") + + total_sent = 0 + total_to_send = sum(len(chunk) for chunk in data) + # this difference is applicable for load-image and program-aeskey commands + expect_response = cmd_tag != CommandTag.NO_COMMAND + self._interface.allow_abort = self.enable_data_abort + try: + for data_chunk in data: + self._interface.write_data(data_chunk) + total_sent += len(data_chunk) + if progress_callback: + progress_callback(total_sent, total_to_send) + if self._pause_point and total_sent > self._pause_point: + time.sleep(0.1) + self._pause_point = None + + if expect_response: + response = self._interface.read() + except TimeoutError as e: + self._status_code = StatusCode.NO_RESPONSE.tag + logger.error("RX: No Response, Timeout Error !") + raise McuBootConnectionError("No Response from Device") from e + except SPSDKError as e: + logger.error(f"RX: {e}") + if expect_response: + response = self._interface.read() + else: + self._status_code = StatusCode.SENDING_OPERATION_CONDITION_ERROR.tag + + if expect_response: + assert isinstance(response, CmdResponse) + logger.debug(f"RX-PACKET: {str(response)}") + self._status_code = response.status + if response.status != StatusCode.SUCCESS: + status_info = ( + StatusCode.get_label(self._status_code) + if self._status_code in StatusCode.tags() + else f"0x{self._status_code:08X}" + ) + logger.debug(f"CMD: Send Error, {status_info}") + if self._cmd_exception: + raise McuBootCommandError(cmd_tag.label, response.status) + return False + + logger.info(f"CMD: Successfully Send {total_sent} out of {total_to_send} Bytes") + return total_sent == total_to_send + + def _get_max_packet_size(self) -> int: + """Get max packet size. + + :return int: max packet size in B + """ + packet_size_property = None + try: + packet_size_property = self.get_property( + prop_tag=PropertyTag.MAX_PACKET_SIZE + ) + except McuBootError: + pass + if packet_size_property is None: + packet_size_property = [self.DEFAULT_MAX_PACKET_SIZE] + logger.warning( + f"CMD: Unable to get MAX PACKET SIZE, using: {self.DEFAULT_MAX_PACKET_SIZE}" + ) + return packet_size_property[0] + + def _split_data(self, data: bytes) -> List[bytes]: + """Split data to send if necessary. + + :param data: Data to send + :return: List of data splices + """ + if not self._interface.need_data_split: + return [data] + max_packet_size = self._get_max_packet_size() + logger.info(f"CMD: Max Packet Size = {max_packet_size}") + return [ + data[i : i + max_packet_size] for i in range(0, len(data), max_packet_size) + ] + + def open(self) -> None: + """Connect to the device.""" + logger.info(f"Connect: {str(self._interface)}") + self._interface.open() + + def close(self) -> None: + """Disconnect from the device.""" + logger.info(f"Closing: {str(self._interface)}") + self._interface.close() + + def get_property_list(self) -> List[PropertyValueBase]: + """Get a list of available properties. + + :return: List of available properties. 
+ :raises McuBootCommandError: Failure to read properties list + """ + property_list: List[PropertyValueBase] = [] + for property_tag in PropertyTag: + try: + values = self.get_property(property_tag) + except McuBootCommandError: + continue + + if values: + prop = parse_property_value(property_tag.tag, values) + assert prop is not None, "Property values cannot be parsed" + property_list.append(prop) + + self._status_code = StatusCode.SUCCESS.tag + if not property_list: + self._status_code = StatusCode.FAIL.tag + if self._cmd_exception: + raise McuBootCommandError("GetPropertyList", self.status_code) + + return property_list + + def _get_internal_flash(self) -> List[FlashRegion]: + """Get information about the internal flash. + + :return: list of FlashRegion objects + """ + index = 0 + mdata: List[FlashRegion] = [] + start_address = 0 + while True: + try: + values = self.get_property(PropertyTag.FLASH_START_ADDRESS, index) + if not values: + break + if index == 0: + start_address = values[0] + elif start_address == values[0]: + break + region_start = values[0] + values = self.get_property(PropertyTag.FLASH_SIZE, index) + if not values: + break + region_size = values[0] + values = self.get_property(PropertyTag.FLASH_SECTOR_SIZE, index) + if not values: + break + region_sector_size = values[0] + mdata.append( + FlashRegion( + index=index, + start=region_start, + size=region_size, + sector_size=region_sector_size, + ) + ) + index += 1 + except McuBootCommandError: + break + + return mdata + + def _get_internal_ram(self) -> List[RamRegion]: + """Get information about the internal RAM. + + :return: list of RamRegion objects + """ + index = 0 + mdata: List[RamRegion] = [] + start_address = 0 + while True: + try: + values = self.get_property(PropertyTag.RAM_START_ADDRESS, index) + if not values: + break + if index == 0: + start_address = values[0] + elif start_address == values[0]: + break + start = values[0] + values = self.get_property(PropertyTag.RAM_SIZE, index) + if not values: + break + size = values[0] + mdata.append(RamRegion(index=index, start=start, size=size)) + index += 1 + except McuBootCommandError: + break + + return mdata + + def _get_ext_memories(self) -> List[ExtMemRegion]: + """Get information about the external memories. 
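A sketch of the property queries these helpers build on: `get_property` returns the raw 32-bit words (or None), `parse_property_value` turns them into a typed object, and `get_property_list` iterates all known tags. Connection boilerplate as in the earlier sketch; a device must be attached, and the final loop relies on the property objects' string representation:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.interfaces.usb import MbootUSBInterface
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.mcuboot import McuBoot
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.properties import (
    PropertyTag,
    parse_property_value,
)

interface = MbootUSBInterface.scan()[0]
with McuBoot(interface) as mboot:
    words = mboot.get_property(PropertyTag.CURRENT_VERSION)
    if words is not None:
        # Raw words -> typed property object.
        print(parse_property_value(PropertyTag.CURRENT_VERSION.tag, words))

    # Or dump everything the bootloader is willing to report:
    for prop in mboot.get_property_list():
        print(prop)
```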
+ + :return: list of ExtMemRegion objects supported by the device + :raises SPSDKError: If no response to get property command + :raises SPSDKError: Other Error + """ + ext_mem_list: List[ExtMemRegion] = [] + ext_mem_ids: Sequence[int] = ExtMemId.tags() + try: + values = self.get_property(PropertyTag.CURRENT_VERSION) + except McuBootCommandError: + values = None + + if not values and self._status_code == StatusCode.UNKNOWN_PROPERTY: + self._status_code = StatusCode.SUCCESS.tag + return ext_mem_list + + if not values: + raise SPSDKError("No response to get property command") + + if Version(values[0]) <= Version("2.0.0"): + # old versions mboot support only Quad SPI memory + ext_mem_ids = [ExtMemId.QUAD_SPI0.tag] + + for mem_id in ext_mem_ids: + try: + values = self.get_property( + PropertyTag.EXTERNAL_MEMORY_ATTRIBUTES, mem_id + ) + except McuBootCommandError: + values = None + + if ( + not values + ): # pragma: no cover # corner-cases are currently untestable without HW + if self._status_code == StatusCode.UNKNOWN_PROPERTY: + break + + if self._status_code in [ + StatusCode.QSPI_NOT_CONFIGURED, + StatusCode.INVALID_ARGUMENT, + ]: + continue + + if self._status_code == StatusCode.MEMORY_NOT_CONFIGURED: + ext_mem_list.append(ExtMemRegion(mem_id=mem_id)) + + if self._status_code == StatusCode.SUCCESS: + raise SPSDKError("Other Error") + + else: + ext_mem_list.append(ExtMemRegion(mem_id=mem_id, raw_values=values)) + return ext_mem_list + + def get_memory_list(self) -> dict: + """Get list of embedded memories. + + :return: dict, with the following keys: internal_flash (optional) - list , + internal_ram (optional) - list, external_mems (optional) - list + :raises McuBootCommandError: Error reading the memory list + """ + memory_list: Dict[str, Sequence[MemoryRegion]] = {} + + # Internal FLASH + mdata = self._get_internal_flash() + if mdata: + memory_list["internal_flash"] = mdata + + # Internal RAM + ram_data = self._get_internal_ram() + if mdata: + memory_list["internal_ram"] = ram_data + + # External Memories + ext_mem_list = self._get_ext_memories() + if ext_mem_list: + memory_list["external_mems"] = ext_mem_list + + self._status_code = StatusCode.SUCCESS.tag + if not memory_list: + self._status_code = StatusCode.FAIL.tag + if self._cmd_exception: + raise McuBootCommandError("GetMemoryList", self.status_code) + + return memory_list + + def flash_erase_all(self, mem_id: int = 0) -> bool: + """Erase complete flash memory without recovering flash security section. + + :param mem_id: Memory ID + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: FlashEraseAll(mem_id={mem_id})") + cmd_packet = CmdPacket(CommandTag.FLASH_ERASE_ALL, CommandFlag.NONE.tag, mem_id) + response = self._process_cmd(cmd_packet) + return response.status == StatusCode.SUCCESS + + def flash_erase_region(self, address: int, length: int, mem_id: int = 0) -> bool: + """Erase specified range of flash. 
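`get_memory_list` aggregates the three probes above into a plain dict whose keys are only present when the corresponding query succeeded. A small sketch, assuming the region objects expose their constructor fields (index, start, size, sector_size) as attributes:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.interfaces.usb import MbootUSBInterface
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.mcuboot import McuBoot

interface = MbootUSBInterface.scan()[0]
with McuBoot(interface) as mboot:
    memories = mboot.get_memory_list()

    for flash in memories.get("internal_flash", []):
        print(
            f"flash #{flash.index}: start=0x{flash.start:08X} "
            f"size={flash.size} sector={flash.sector_size}"
        )
    for ram in memories.get("internal_ram", []):
        print(f"ram #{ram.index}: start=0x{ram.start:08X} size={ram.size}")
    print(f"external memories: {len(memories.get('external_mems', []))}")
```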
+ + :param address: Start address + :param length: Count of bytes + :param mem_id: Memory ID + :return: False in case of any problem; True otherwise + """ + logger.info( + f"CMD: FlashEraseRegion(address=0x{address:08X}, length={length}, mem_id={mem_id})" + ) + mem_id = _clamp_down_memory_id(memory_id=mem_id) + cmd_packet = CmdPacket( + CommandTag.FLASH_ERASE_REGION, CommandFlag.NONE.tag, address, length, mem_id + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def read_memory( + self, + address: int, + length: int, + mem_id: int = 0, + progress_callback: Optional[Callable[[int, int], None]] = None, + fast_mode: bool = False, + ) -> Optional[bytes]: + """Read data from MCU memory. + + :param address: Start address + :param length: Count of bytes + :param mem_id: Memory ID + :param fast_mode: Fast mode for USB-HID data transfer, not reliable !!! + :param progress_callback: Callback for updating the caller about the progress + :return: Data read from the memory; None in case of a failure + """ + logger.info( + f"CMD: ReadMemory(address=0x{address:08X}, length={length}, mem_id={mem_id})" + ) + mem_id = _clamp_down_memory_id(memory_id=mem_id) + + # workaround for better USB-HID reliability + if isinstance(self._interface.device, UsbDevice) and not fast_mode: + payload_size = self._get_max_packet_size() + packets = length // payload_size + remainder = length % payload_size + if remainder: + packets += 1 + + data = b"" + + for idx in range(packets): + if idx == packets - 1 and remainder: + data_len = remainder + else: + data_len = payload_size + + cmd_packet = CmdPacket( + CommandTag.READ_MEMORY, + CommandFlag.NONE.tag, + address + idx * payload_size, + data_len, + mem_id, + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + data += self._read_data(CommandTag.READ_MEMORY, data_len) + if progress_callback: + progress_callback(len(data), length) + if self._status_code == StatusCode.NO_RESPONSE: + logger.warning( + f"CMD: NO RESPONSE, received {len(data)}/{length} B" + ) + return data + else: + return b"" + + return data + + cmd_packet = CmdPacket( + CommandTag.READ_MEMORY, CommandFlag.NONE.tag, address, length, mem_id + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + assert isinstance(cmd_response, ReadMemoryResponse) + return self._read_data( + CommandTag.READ_MEMORY, cmd_response.length, progress_callback + ) + return None + + def write_memory( + self, + address: int, + data: bytes, + mem_id: int = 0, + progress_callback: Optional[Callable[[int, int], None]] = None, + ) -> bool: + """Write data into MCU memory. 
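`read_memory`, `write_memory` and `receive_sb_file` all accept the same optional `progress_callback` and invoke it with (bytes transferred so far, total bytes). A tiny console progress printer that fits that contract:

```python
def progress(done: int, total: int) -> None:
    """Called repeatedly with (bytes transferred so far, total bytes)."""
    percent = 100 * done // total if total else 100
    print(f"\r{done}/{total} B ({percent}%)", end="", flush=True)

# e.g.: mboot.read_memory(address, length, progress_callback=progress)
```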
+ + :param address: Start address + :param data: List of bytes + :param progress_callback: Callback for updating the caller about the progress + :param mem_id: Memory ID, see ExtMemId; additionally use `0` for internal memory + :return: False in case of any problem; True otherwise + """ + logger.info( + f"CMD: WriteMemory(address=0x{address:08X}, length={len(data)}, mem_id={mem_id})" + ) + data_chunks = self._split_data(data=data) + mem_id = _clamp_down_memory_id(memory_id=mem_id) + cmd_packet = CmdPacket( + CommandTag.WRITE_MEMORY, + CommandFlag.HAS_DATA_PHASE.tag, + address, + len(data), + mem_id, + ) + if self._process_cmd(cmd_packet).status == StatusCode.SUCCESS: + return self._send_data( + CommandTag.WRITE_MEMORY, data_chunks, progress_callback + ) + return False + + def fill_memory(self, address: int, length: int, pattern: int = 0xFFFFFFFF) -> bool: + """Fill MCU memory with specified pattern. + + :param address: Start address (must be word aligned) + :param length: Count of words (must be word aligned) + :param pattern: Count of wrote bytes + :return: False in case of any problem; True otherwise + """ + logger.info( + f"CMD: FillMemory(address=0x{address:08X}, length={length}, pattern=0x{pattern:08X})" + ) + cmd_packet = CmdPacket( + CommandTag.FILL_MEMORY, CommandFlag.NONE.tag, address, length, pattern + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def flash_security_disable(self, backdoor_key: bytes) -> bool: + """Disable flash security by using of backdoor key. + + :param backdoor_key: The key value as array of 8 bytes + :return: False in case of any problem; True otherwise + :raises McuBootError: If the backdoor_key is not 8 bytes long + """ + if len(backdoor_key) != 8: + raise McuBootError("Backdoor key must by 8 bytes long") + logger.info(f"CMD: FlashSecurityDisable(backdoor_key={backdoor_key!r})") + key_high = backdoor_key[0:4][::-1] + key_low = backdoor_key[4:8][::-1] + cmd_packet = CmdPacket( + CommandTag.FLASH_SECURITY_DISABLE, + CommandFlag.NONE.tag, + data=key_high + key_low, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def get_property( + self, prop_tag: PropertyTag, index: int = 0 + ) -> Optional[List[int]]: + """Get specified property value. + + :param prop_tag: Property TAG (see Properties Enum) + :param index: External memory ID or internal memory region index (depends on property type) + :return: list integers representing the property; None in case no response from device + :raises McuBootError: If received invalid get-property response + """ + logger.info(f"CMD: GetProperty({prop_tag.label}, index={index!r})") + cmd_packet = CmdPacket( + CommandTag.GET_PROPERTY, CommandFlag.NONE.tag, prop_tag.tag, index + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + if isinstance(cmd_response, GetPropertyResponse): + return cmd_response.values + raise McuBootError( + f"Received invalid get-property response: {str(cmd_response)}" + ) + return None + + def set_property(self, prop_tag: PropertyTag, value: int) -> bool: + """Set value of specified property. 
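The usual flash sequence built from these primitives is erase, then write, then read back. The sketch below uses purely illustrative addresses and sizes (query FLASH_START_ADDRESS/FLASH_SECTOR_SIZE for real values) and is not something to run blindly against a Nitrokey 3, whose flash is normally only touched through the signed SB2.1 update path:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.interfaces.usb import MbootUSBInterface
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.mcuboot import McuBoot

ADDRESS = 0x0004_0000  # illustrative only
SECTOR = 0x200         # illustrative only
payload = b"\xAA" * 16

interface = MbootUSBInterface.scan()[0]
with McuBoot(interface) as mboot:
    # Flash must be erased before it can be programmed (see the
    # cumulative-write status codes above).
    if mboot.flash_erase_region(ADDRESS, SECTOR) and mboot.write_memory(ADDRESS, payload):
        readback = mboot.read_memory(ADDRESS, len(payload))
        print("verified" if readback == payload else "mismatch")
    print(mboot.status_string)
```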
+ + :param prop_tag: Property TAG (see Property enumerator) + :param value: The value of selected property + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: SetProperty({prop_tag.label}, value=0x{value:08X})") + cmd_packet = CmdPacket( + CommandTag.SET_PROPERTY, CommandFlag.NONE.tag, prop_tag.tag, value + ) + cmd_response = self._process_cmd(cmd_packet) + return cmd_response.status == StatusCode.SUCCESS + + def receive_sb_file( + self, + data: bytes, + progress_callback: Optional[Callable[[int, int], None]] = None, + check_errors: bool = False, + ) -> bool: + """Receive SB file. + + :param data: SB file data + :param progress_callback: Callback for updating the caller about the progress + :param check_errors: Check for ABORT_FRAME (and related errors) on USB interface between data packets. + When this parameter is set to `False` significantly improves USB transfer speed (cca 20x) + However, the final status code might be misleading (original root cause may get overridden) + In case `receive-sb-file` fails, re-run the operation with this flag set to `True` + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: ReceiveSBfile(data_length={len(data)})") + data_chunks = self._split_data(data=data) + cmd_packet = CmdPacket( + CommandTag.RECEIVE_SB_FILE, CommandFlag.HAS_DATA_PHASE.tag, len(data) + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + self.enable_data_abort = check_errors + if isinstance(self._interface.device, UsbDevice): + try: + # pylint: disable=import-outside-toplevel # import only if needed to save time + from ..sbfile.sb2.images import ImageHeaderV2 + + sb2_header = ImageHeaderV2.parse(data=data) + self._pause_point = sb2_header.first_boot_tag_block * 16 + except SPSDKError: + pass + # Deactivated for pynitrokey + # try: + # # pylint: disable=import-outside-toplevel # import only if needed to save time + # from spsdk.sbfile.sb31.images import SecureBinary31Header + + # sb3_header = SecureBinary31Header.parse(data=data) + # self._pause_point = sb3_header.image_total_length + # except SPSDKError: + # pass + result = self._send_data( + CommandTag.RECEIVE_SB_FILE, data_chunks, progress_callback + ) + self.enable_data_abort = False + return result + return False + + def execute( + self, address: int, argument: int, sp: int + ) -> bool: # pylint: disable=invalid-name + """Execute program on a given address using the stack pointer. + + :param address: Jump address (must be word aligned) + :param argument: Function arguments address + :param sp: Stack pointer address + :return: False in case of any problem; True otherwise + """ + logger.info( + f"CMD: Execute(address=0x{address:08X}, argument=0x{argument:08X}, SP=0x{sp:08X})" + ) + cmd_packet = CmdPacket( + CommandTag.EXECUTE, CommandFlag.NONE.tag, address, argument, sp + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def call(self, address: int, argument: int) -> bool: + """Fill MCU memory with specified pattern. 
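`receive_sb_file` streams an SB2.1 container to the device, which is the transfer this vendored module exists for. A minimal sketch; the firmware path is a placeholder and error handling is reduced to printing the status:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.interfaces.usb import MbootUSBInterface
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.mcuboot import McuBoot

with open("firmware.sb2", "rb") as f:  # placeholder path to a signed SB2.1 image
    image = f.read()

interface = MbootUSBInterface.scan()[0]
with McuBoot(interface) as mboot:
    ok = mboot.receive_sb_file(image)
    if not ok:
        # Re-running with check_errors=True is slower but keeps the original
        # root-cause status code (see the docstring above).
        print(mboot.status_string)
    else:
        mboot.reset(reopen=False)  # leave the bootloader and start the new firmware
```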
+ + :param address: Call address (must be word aligned) + :param argument: Function arguments address + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: Call(address=0x{address:08X}, argument=0x{argument:08X})") + cmd_packet = CmdPacket(CommandTag.CALL, CommandFlag.NONE.tag, address, argument) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def reset(self, timeout: int = 2000, reopen: bool = True) -> bool: + """Reset MCU and reconnect if enabled. + + :param timeout: The maximal waiting time in [ms] for reopen connection + :param reopen: True for reopen connection after HW reset else False + :return: False in case of any problem; True otherwise + :raises McuBootError: if reopen is not supported + :raises McuBootConnectionError: Failure to reopen the device + """ + logger.info("CMD: Reset MCU") + cmd_packet = CmdPacket(CommandTag.RESET, CommandFlag.NONE.tag) + ret_val = False + status = self._process_cmd(cmd_packet).status + self.close() + ret_val = True + + if status not in [StatusCode.NO_RESPONSE, StatusCode.SUCCESS]: + ret_val = False + if self._cmd_exception: + raise McuBootConnectionError("Reset command failed") + + if status == StatusCode.NO_RESPONSE: + logger.warning("Did not receive response from reset command, ignoring it") + self._status_code = StatusCode.SUCCESS.tag + + if reopen: + if not self.reopen: + raise McuBootError("reopen is not supported") + time.sleep(timeout / 1000) + try: + self.open() + except SPSDKError as e: + ret_val = False + if self._cmd_exception: + raise McuBootConnectionError("reopen failed") from e + + return ret_val + + def flash_erase_all_unsecure(self) -> bool: + """Erase complete flash memory and recover flash security section. + + :return: False in case of any problem; True otherwise + """ + logger.info("CMD: FlashEraseAllUnsecure") + cmd_packet = CmdPacket( + CommandTag.FLASH_ERASE_ALL_UNSECURE, CommandFlag.NONE.tag + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def efuse_read_once(self, index: int) -> Optional[int]: + """Read from MCU flash program once region. + + :param index: Start index + :return: read value (32-bit int); None if operation failed + """ + logger.info(f"CMD: FlashReadOnce(index={index})") + cmd_packet = CmdPacket( + CommandTag.FLASH_READ_ONCE, CommandFlag.NONE.tag, index, 4 + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + assert isinstance(cmd_response, FlashReadOnceResponse) + return cmd_response.values[0] + return None + + def efuse_program_once(self, index: int, value: int, verify: bool = False) -> bool: + """Write into MCU once program region (OCOTP). + + :param index: Start index + :param value: Int value (4 bytes long) + :param verify: Verify that data were written (by comparing value as bitmask) + :return: False in case of any problem; True otherwise + """ + logger.info( + f"CMD: FlashProgramOnce(index={index}, value=0x{value:X}) " + f"with{'' if verify else 'out'} verification." 
+ ) + cmd_packet = CmdPacket( + CommandTag.FLASH_PROGRAM_ONCE, CommandFlag.NONE.tag, index, 4, value + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status != StatusCode.SUCCESS: + return False + if verify: + read_value = self.efuse_read_once(index=index & ((1 << 24) - 1)) + if read_value is None: + return False + # We check only a bitmask, because OTP allows to burn individual bits separately + # Some other bits may have been already written + if read_value & value == value: + return True + # It may happen that ROM will not report error when attempting to write into locked OTP + # In such case we substitute the original SUCCESS code with custom-made OTP_VERIFY_FAIL + self._status_code = StatusCode.OTP_VERIFY_FAIL.tag + return False + return cmd_response.status == StatusCode.SUCCESS + + def flash_read_once(self, index: int, count: int = 4) -> Optional[bytes]: + """Read from MCU flash program once region (max 8 bytes). + + :param index: Start index + :param count: Count of bytes + :return: Data read; None in case of an failure + :raises SPSDKError: When invalid count of bytes. Must be 4 or 8 + """ + if count not in (4, 8): + raise SPSDKError("Invalid count of bytes. Must be 4 or 8") + logger.info(f"CMD: FlashReadOnce(index={index}, bytes={count})") + cmd_packet = CmdPacket( + CommandTag.FLASH_READ_ONCE, CommandFlag.NONE.tag, index, count + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + assert isinstance(cmd_response, FlashReadOnceResponse) + return cmd_response.data + return None + + def flash_program_once(self, index: int, data: bytes) -> bool: + """Write into MCU flash program once region (max 8 bytes). + + :param index: Start index + :param data: Input data aligned to 4 or 8 bytes + :return: False in case of any problem; True otherwise + :raises SPSDKError: When invalid length of data. Must be aligned to 4 or 8 bytes + """ + if len(data) not in (4, 8): + raise SPSDKError("Invalid length of data. Must be aligned to 4 or 8 bytes") + logger.info(f"CMD: FlashProgramOnce(index={index!r}, data={data!r})") + cmd_packet = CmdPacket( + CommandTag.FLASH_PROGRAM_ONCE, + CommandFlag.NONE.tag, + index, + len(data), + data=data, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def flash_read_resource( + self, address: int, length: int, option: int = 1 + ) -> Optional[bytes]: + """Read resource of flash module. + + :param address: Start address + :param length: Number of bytes + :param option: Area to be read. 0 means Flash IFR, 1 means Flash Firmware ID + :raises McuBootError: when the length is not aligned to 4 bytes + :return: Data from the resource; None in case of an failure + """ + if length % 4: + raise McuBootError( + "The number of bytes to read is not aligned to the 4 bytes" + ) + logger.info( + f"CMD: FlashReadResource(address=0x{address:08X}, length={length}, option={option})" + ) + cmd_packet = CmdPacket( + CommandTag.FLASH_READ_RESOURCE, + CommandFlag.NONE.tag, + address, + length, + option, + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + assert isinstance(cmd_response, FlashReadResourceResponse) + return self._read_data(CommandTag.FLASH_READ_RESOURCE, cmd_response.length) + return None + + def configure_memory(self, address: int, mem_id: int) -> bool: + """Configure memory. 
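The verification branch in `efuse_program_once` above compares the read-back word as a bitmask rather than for equality, because OTP bits can only ever be burned, never cleared. A plain-Python illustration of that rule, no device required:

```python
# Plain-Python restatement of the OTP verification rule used above: a write
# counts as successful if every requested bit reads back as set, even when
# other bits were already burned earlier.
def otp_write_verified(read_back: int, requested: int) -> bool:
    return read_back & requested == requested


assert otp_write_verified(read_back=0b1111, requested=0b0101)      # extra bits set: still OK
assert not otp_write_verified(read_back=0b0001, requested=0b0101)  # a requested bit is missing
```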
+ + :param address: The address in memory where are locating configuration data + :param mem_id: Memory ID + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: ConfigureMemory({mem_id}, address=0x{address:08X})") + cmd_packet = CmdPacket( + CommandTag.CONFIGURE_MEMORY, CommandFlag.NONE.tag, mem_id, address + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def reliable_update(self, address: int) -> bool: + """Reliable Update. + + :param address: Address where new the firmware is stored + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: ReliableUpdate(address=0x{address:08X})") + cmd_packet = CmdPacket( + CommandTag.RELIABLE_UPDATE, CommandFlag.NONE.tag, address + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def generate_key_blob( + self, + dek_data: bytes, + key_sel: int = GenerateKeyBlobSelect.OPTMK.tag, + count: int = 72, + ) -> Optional[bytes]: + """Generate Key Blob. + + :param dek_data: Data Encryption Key as bytes + :param key_sel: select the BKEK used to wrap the BK (default: OPTMK/FUSES) + :param count: Key blob count (default: 72 - AES128bit) + :return: Key blob; None in case of an failure + """ + logger.info( + f"CMD: GenerateKeyBlob(dek_len={len(dek_data)}, key_sel={key_sel}, count={count})" + ) + data_chunks = self._split_data(data=dek_data) + cmd_response = self._process_cmd( + CmdPacket( + CommandTag.GENERATE_KEY_BLOB, + CommandFlag.HAS_DATA_PHASE.tag, + key_sel, + len(dek_data), + 0, + ) + ) + if cmd_response.status != StatusCode.SUCCESS: + return None + if not self._send_data(CommandTag.GENERATE_KEY_BLOB, data_chunks): + return None + cmd_response = self._process_cmd( + CmdPacket( + CommandTag.GENERATE_KEY_BLOB, CommandFlag.NONE.tag, key_sel, count, 1 + ) + ) + if cmd_response.status == StatusCode.SUCCESS: + assert isinstance(cmd_response, ReadMemoryResponse) + return self._read_data(CommandTag.GENERATE_KEY_BLOB, cmd_response.length) + return None + + def kp_enroll(self) -> bool: + """Key provisioning: Enroll Command (start PUF). + + :return: False in case of any problem; True otherwise + """ + logger.info("CMD: [KeyProvisioning] Enroll") + cmd_packet = CmdPacket( + CommandTag.KEY_PROVISIONING, + CommandFlag.NONE.tag, + KeyProvOperation.ENROLL.tag, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def kp_set_intrinsic_key(self, key_type: int, key_size: int) -> bool: + """Key provisioning: Generate Intrinsic Key. + + :param key_type: Type of the key + :param key_size: Size of the key + :return: False in case of any problem; True otherwise + """ + logger.info( + f"CMD: [KeyProvisioning] SetIntrinsicKey(type={key_type}, key_size={key_size})" + ) + cmd_packet = CmdPacket( + CommandTag.KEY_PROVISIONING, + CommandFlag.NONE.tag, + KeyProvOperation.SET_INTRINSIC_KEY.tag, + key_type, + key_size, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def kp_write_nonvolatile(self, mem_id: int = 0) -> bool: + """Key provisioning: Write the key to a nonvolatile memory. 
+ + :param mem_id: The memory ID (default: 0) + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: [KeyProvisioning] WriteNonVolatileMemory(mem_id={mem_id})") + cmd_packet = CmdPacket( + CommandTag.KEY_PROVISIONING, + CommandFlag.NONE.tag, + KeyProvOperation.WRITE_NON_VOLATILE.tag, + mem_id, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def kp_read_nonvolatile(self, mem_id: int = 0) -> bool: + """Key provisioning: Load the key from a nonvolatile memory to bootloader. + + :param mem_id: The memory ID (default: 0) + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: [KeyProvisioning] ReadNonVolatileMemory(mem_id={mem_id})") + cmd_packet = CmdPacket( + CommandTag.KEY_PROVISIONING, + CommandFlag.NONE.tag, + KeyProvOperation.READ_NON_VOLATILE.tag, + mem_id, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def kp_set_user_key(self, key_type: int, key_data: bytes) -> bool: + """Key provisioning: Send the user key specified by to bootloader. + + :param key_type: type of the user key, see enumeration for details + :param key_data: binary content of the user key + :return: False in case of any problem; True otherwise + """ + logger.info( + f"CMD: [KeyProvisioning] SetUserKey(key_type={key_type}, " + f"key_len={len(key_data)})" + ) + data_chunks = self._split_data(data=key_data) + cmd_packet = CmdPacket( + CommandTag.KEY_PROVISIONING, + CommandFlag.HAS_DATA_PHASE.tag, + KeyProvOperation.SET_USER_KEY.tag, + key_type, + len(key_data), + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + return self._send_data(CommandTag.KEY_PROVISIONING, data_chunks) + return False + + def kp_write_key_store(self, key_data: bytes) -> bool: + """Key provisioning: Write key data into key store area. + + :param key_data: key store binary content to be written to processor + :return: result of the operation; True means success + """ + logger.info(f"CMD: [KeyProvisioning] WriteKeyStore(key_len={len(key_data)})") + data_chunks = self._split_data(data=key_data) + cmd_packet = CmdPacket( + CommandTag.KEY_PROVISIONING, + CommandFlag.HAS_DATA_PHASE.tag, + KeyProvOperation.WRITE_KEY_STORE.tag, + 0, + len(key_data), + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + return self._send_data(CommandTag.KEY_PROVISIONING, data_chunks) + return False + + def kp_read_key_store(self) -> Optional[bytes]: + """Key provisioning: Read key data from key store area.""" + logger.info("CMD: [KeyProvisioning] ReadKeyStore") + cmd_packet = CmdPacket( + CommandTag.KEY_PROVISIONING, + CommandFlag.NONE.tag, + KeyProvOperation.READ_KEY_STORE.tag, + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + assert isinstance(cmd_response, KeyProvisioningResponse) + return self._read_data(CommandTag.KEY_PROVISIONING, cmd_response.length) + return None + + def load_image( + self, + data: bytes, + progress_callback: Optional[Callable[[int, int], None]] = None, + ) -> bool: + """Load a boot image to the device. 
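The key-provisioning commands above are building blocks that a caller would chain; the sketch below shows one plausible ordering (PUF enrollment, intrinsic key generation, persisting to non-volatile memory). `mboot` is an assumed open McuBoot session and the key type/size values are placeholders, not values taken from this patch.

```python
from typing import Any


def provision_intrinsic_key(mboot: Any, key_type: int, key_size: int) -> bool:
    # Placeholder flow: enroll the PUF, derive an intrinsic key, then persist
    # the resulting key material to non-volatile memory (memory ID 0).
    if not mboot.kp_enroll():
        return False
    if not mboot.kp_set_intrinsic_key(key_type, key_size):
        return False
    return mboot.kp_write_nonvolatile(mem_id=0)
```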
+ + :param data: boot image + :param progress_callback: Callback for updating the caller about the progress + :return: False in case of any problem; True otherwise + """ + logger.info(f"CMD: LoadImage(length={len(data)})") + data_chunks = self._split_data(data) + # there's no command in this case + self._status_code = StatusCode.SUCCESS.tag + return self._send_data(CommandTag.NO_COMMAND, data_chunks, progress_callback) + + def tp_prove_genuinity(self, address: int, buffer_size: int) -> Optional[int]: + """Start the process of proving genuinity. + + :param address: Address where to prove genuinity request (challenge) container + :param buffer_size: Maximum size of the response package (limit 0xFFFF) + :raises McuBootError: Invalid input parameters + :return: True if prove_genuinity operation is successfully completed + """ + logger.info( + f"CMD: [TrustProvisioning] ProveGenuinity(address={hex(address)}, " + f"buffer_size={buffer_size})" + ) + if buffer_size > 0xFFFF: + raise McuBootError("buffer_size must be less than 0xFFFF") + address_msb = (address >> 32) & 0xFFFF_FFFF + address_lsb = address & 0xFFFF_FFFF + sentinel_cmd = _tp_sentinel_frame( + TrustProvOperation.PROVE_GENUINITY.tag, + args=[address_msb, address_lsb, buffer_size], + ) + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, CommandFlag.NONE.tag, data=sentinel_cmd + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + assert isinstance(cmd_response, TrustProvisioningResponse) + return cmd_response.values[0] + return None + + def tp_set_wrapped_data( + self, address: int, stage: int = 0x4B, control: int = 1 + ) -> bool: + """Start the process of setting OEM data. + + :param address: Address where the wrapped data container on target + :param control: 1 - use the address, 2 - use container within the firmware, defaults to 1 + :param stage: Stage of TrustProvisioning flow, defaults to 0x4B + :return: True if set_wrapped_data operation is successfully completed + """ + logger.info(f"CMD: [TrustProvisioning] SetWrappedData(address={hex(address)})") + if address == 0: + control = 2 + + address_msb = (address >> 32) & 0xFFFF_FFFF + address_lsb = address & 0xFFFF_FFFF + stage_control = control << 8 | stage + sentinel_cmd = _tp_sentinel_frame( + TrustProvOperation.ISP_SET_WRAPPED_DATA.tag, + args=[stage_control, address_msb, address_lsb], + ) + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, CommandFlag.NONE.tag, data=sentinel_cmd + ) + cmd_response = self._process_cmd(cmd_packet) + return cmd_response.status == StatusCode.SUCCESS + + def fuse_program(self, address: int, data: bytes, mem_id: int = 0) -> bool: + """Program fuse. + + :param address: Start address + :param data: List of bytes + :param mem_id: Memory ID + :return: False in case of any problem; True otherwise + """ + logger.info( + f"CMD: FuseProgram(address=0x{address:08X}, length={len(data)}, mem_id={mem_id})" + ) + data_chunks = self._split_data(data=data) + mem_id = _clamp_down_memory_id(memory_id=mem_id) + cmd_packet = CmdPacket( + CommandTag.FUSE_PROGRAM, + CommandFlag.HAS_DATA_PHASE.tag, + address, + len(data), + mem_id, + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: # pragma: no cover + # command is not supported in any device, thus we can't measure coverage + return self._send_data(CommandTag.FUSE_PROGRAM, data_chunks) + return False + + def fuse_read(self, address: int, length: int, mem_id: int = 0) -> Optional[bytes]: + """Read fuse. 
+ + :param address: Start address + :param length: Count of bytes + :param mem_id: Memory ID + :return: Data read from the fuse; None in case of a failure + """ + logger.info( + f"CMD: ReadFuse(address=0x{address:08X}, length={length}, mem_id={mem_id})" + ) + mem_id = _clamp_down_memory_id(memory_id=mem_id) + cmd_packet = CmdPacket( + CommandTag.FUSE_READ, CommandFlag.NONE.tag, address, length, mem_id + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: # pragma: no cover + # command is not supported in any device, thus we can't measure coverage + assert isinstance(cmd_response, ReadMemoryResponse) + return self._read_data(CommandTag.FUSE_READ, cmd_response.length) + return None + + def update_life_cycle(self, life_cycle: int) -> bool: + """Update device life cycle. + + :param life_cycle: New life cycle value. + :return: False in case of any problems, True otherwise. + """ + logger.info(f"CMD: UpdateLifeCycle (life cycle=0x{life_cycle:02X})") + cmd_packet = CmdPacket( + CommandTag.UPDATE_LIFE_CYCLE, CommandFlag.NONE.tag, life_cycle + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def ele_message( + self, cmdMsgAddr: int, cmdMsgCnt: int, respMsgAddr: int, respMsgCnt: int + ) -> bool: + """Send EdgeLock Enclave message. + + :param cmdMsgAddr: Address in RAM where is prepared the command message words + :param cmdMsgCnt: Count of 32bits command words + :param respMsgAddr: Address in RAM where the command store the response + :param respMsgCnt: Count of 32bits response words + + :return: False in case of any problems, True otherwise. + """ + logger.info( + f"CMD: EleMessage Command (cmdMsgAddr=0x{cmdMsgAddr:08X}, cmdMsgCnt={cmdMsgCnt})" + ) + if respMsgCnt: + logger.info( + f"CMD: EleMessage Response (respMsgAddr=0x{respMsgAddr:08X}, respMsgCnt={respMsgCnt})" + ) + cmd_packet = CmdPacket( + CommandTag.ELE_MESSAGE, + CommandFlag.NONE.tag, + 0, # reserved for future use as a sub command ID or anything else + cmdMsgAddr, + cmdMsgCnt, + respMsgAddr, + respMsgCnt, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def tp_hsm_gen_key( + self, + key_type: int, + reserved: int, + key_blob_output_addr: int, + key_blob_output_size: int, + ecdsa_puk_output_addr: int, + ecdsa_puk_output_size: int, + ) -> Optional[List[int]]: + """Trust provisioning: OEM generate common keys. 
+ + :param key_type: Key to generate (MFW_ISK, MFW_ENCK, GEN_SIGNK, GET_CUST_MK_SK) + :param reserved: Reserved, must be zero + :param key_blob_output_addr: The output buffer address where ROM writes the key blob to + :param key_blob_output_size: The output buffer size in byte + :param ecdsa_puk_output_addr: The output buffer address where ROM writes the public key to + :param ecdsa_puk_output_size: The output buffer size in byte + :return: Return byte count of the key blob + byte count of the public key from the device; + None in case of an failure + """ + logger.info("CMD: [TrustProvisioning] OEM generate common keys") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvOperation.HSM_GEN_KEY.tag, + key_type, + reserved, + key_blob_output_addr, + key_blob_output_size, + ecdsa_puk_output_addr, + ecdsa_puk_output_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values + return None + + def tp_oem_gen_master_share( + self, + oem_share_input_addr: int, + oem_share_input_size: int, + oem_enc_share_output_addr: int, + oem_enc_share_output_size: int, + oem_enc_master_share_output_addr: int, + oem_enc_master_share_output_size: int, + oem_cust_cert_puk_output_addr: int, + oem_cust_cert_puk_output_size: int, + ) -> Optional[List[int]]: + """Takes the entropy seed provided by the OEM as input. + + :param oem_share_input_addr: The input buffer address + where the OEM Share(entropy seed) locates at + :param oem_share_input_size: The byte count of the OEM Share + :param oem_enc_share_output_addr: The output buffer address + where ROM writes the Encrypted OEM Share to + :param oem_enc_share_output_size: The output buffer size in byte + :param oem_enc_master_share_output_addr: The output buffer address + where ROM writes the Encrypted OEM Master Share to + :param oem_enc_master_share_output_size: The output buffer size in byte. + :param oem_cust_cert_puk_output_addr: The output buffer address where + ROM writes the OEM Customer Certificate Public Key to + :param oem_cust_cert_puk_output_size: The output buffer size in byte + :return: Sizes of two encrypted blobs(the Encrypted OEM Share and the Encrypted OEM Master Share) + and a public key(the OEM Customer Certificate Public Key). + """ + logger.info("CMD: [TrustProvisioning] OEM generate master share") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvOperation.OEM_GEN_MASTER_SHARE.tag, + oem_share_input_addr, + oem_share_input_size, + oem_enc_share_output_addr, + oem_enc_share_output_size, + oem_enc_master_share_output_addr, + oem_enc_master_share_output_size, + oem_cust_cert_puk_output_addr, + oem_cust_cert_puk_output_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values + return None + + def tp_oem_set_master_share( + self, + oem_share_input_addr: int, + oem_share_input_size: int, + oem_enc_master_share_input_addr: int, + oem_enc_master_share_input_size: int, + ) -> bool: + """Takes the entropy seed and the Encrypted OEM Master Share. 
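The trust-provisioning calls above exchange nothing but device-RAM addresses and byte counts, which makes a call site hard to picture from the signatures alone. A hedged sketch of invoking `tp_oem_gen_master_share`; every address and buffer size below is a placeholder the caller would have to reserve on the target first.

```python
from typing import Any, Optional, Tuple

# Placeholder device-RAM addresses and sizes, not taken from this patch.
SHARE_IN, ENC_SHARE, ENC_MASTER, CUST_PUK = 0x20000000, 0x20001000, 0x20002000, 0x20003000


def gen_master_share(mboot: Any, seed_len: int = 16) -> Optional[Tuple[int, int, int]]:
    values = mboot.tp_oem_gen_master_share(
        oem_share_input_addr=SHARE_IN,
        oem_share_input_size=seed_len,
        oem_enc_share_output_addr=ENC_SHARE,
        oem_enc_share_output_size=0x1000,
        oem_enc_master_share_output_addr=ENC_MASTER,
        oem_enc_master_share_output_size=0x1000,
        oem_cust_cert_puk_output_addr=CUST_PUK,
        oem_cust_cert_puk_output_size=0x1000,
    )
    if values is None:
        return None
    # Per the docstring: sizes of the two encrypted shares, then the public key.
    return values[0], values[1], values[2]
```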
+ + :param oem_share_input_addr: The input buffer address + where the OEM Share(entropy seed) locates at + :param oem_share_input_size: The byte count of the OEM Share + :param oem_enc_master_share_input_addr: The input buffer address + where the Encrypted OEM Master Share locates at + :param oem_enc_master_share_input_size: The byte count of the Encrypted OEM Master Share + :return: False in case of any problem; True otherwise + """ + logger.info( + "CMD: [TrustProvisioning] Takes the entropy seed and the Encrypted OEM Master Share." + ) + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvOperation.OEM_SET_MASTER_SHARE.tag, + oem_share_input_addr, + oem_share_input_size, + oem_enc_master_share_input_addr, + oem_enc_master_share_input_size, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def tp_oem_get_cust_cert_dice_puk( + self, + oem_rkth_input_addr: int, + oem_rkth_input_size: int, + oem_cust_cert_dice_puk_output_addr: int, + oem_cust_cert_dice_puk_output_size: int, + ) -> Optional[int]: + """Creates the initial trust provisioning keys. + + :param oem_rkth_input_addr: The input buffer address where the OEM RKTH locates at + :param oem_rkth_input_size: The byte count of the OEM RKTH + :param oem_cust_cert_dice_puk_output_addr: The output buffer address where ROM writes the OEM Customer + Certificate Public Key for DICE to + :param oem_cust_cert_dice_puk_output_size: The output buffer size in byte + :return: The byte count of the OEM Customer Certificate Public Key for DICE + """ + logger.info( + "CMD: [TrustProvisioning] Creates the initial trust provisioning keys" + ) + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvOperation.OEM_GET_CUST_CERT_DICE_PUK.tag, + oem_rkth_input_addr, + oem_rkth_input_size, + oem_cust_cert_dice_puk_output_addr, + oem_cust_cert_dice_puk_output_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values[0] + return None + + def tp_hsm_store_key( + self, + key_type: int, + key_property: int, + key_input_addr: int, + key_input_size: int, + key_blob_output_addr: int, + key_blob_output_size: int, + ) -> Optional[List[int]]: + """Trust provisioning: OEM generate common keys. + + :param key_type: Key to generate (CKDFK, HKDFK, HMACK, CMACK, AESK, KUOK) + :param key_property: Bit 0: Key Size, 0 for 128bit, 1 for 256bit. + Bits 30-31: set key protection CSS mode. 
+ :param key_input_addr: The input buffer address where the key locates at + :param key_input_size: The byte count of the key + :param key_blob_output_addr: The output buffer address where ROM writes the key blob to + :param key_blob_output_size: The output buffer size in byte + :return: Return header of the key blob + byte count of the key blob + (header is not included) from the device; None in case of an failure + """ + logger.info("CMD: [TrustProvisioning] OEM generate common keys") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvOperation.HSM_STORE_KEY.tag, + key_type, + key_property, + key_input_addr, + key_input_size, + key_blob_output_addr, + key_blob_output_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values + return None + + def tp_hsm_enc_blk( + self, + mfg_cust_mk_sk_0_blob_input_addr: int, + mfg_cust_mk_sk_0_blob_input_size: int, + kek_id: int, + sb3_header_input_addr: int, + sb3_header_input_size: int, + block_num: int, + block_data_addr: int, + block_data_size: int, + ) -> bool: + """Trust provisioning: Encrypt the given SB3 data block. + + :param mfg_cust_mk_sk_0_blob_input_addr: The input buffer address + where the CKDF Master Key Blob locates at + :param mfg_cust_mk_sk_0_blob_input_size: The byte count of the CKDF Master Key Blob + :param kek_id: The CKDF Master Key Encryption Key ID + (0x10: NXP_CUST_KEK_INT_SK, 0x11: NXP_CUST_KEK_EXT_SK) + :param sb3_header_input_addr: The input buffer address, + where the SB3 Header(block0) locates at + :param sb3_header_input_size: The byte count of the SB3 Header + :param block_num: The index of the block. Due to SB3 Header(block 0) is always unencrypted, + the index starts from block1 + :param block_data_addr: The buffer address where the SB3 data block locates at + :param block_data_size: The byte count of the SB3 data block + :return: False in case of any problem; True otherwise + """ + logger.info("CMD: [TrustProvisioning] Encrypt the given SB3 data block") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvOperation.HSM_ENC_BLOCK.tag, + mfg_cust_mk_sk_0_blob_input_addr, + mfg_cust_mk_sk_0_blob_input_size, + kek_id, + sb3_header_input_addr, + sb3_header_input_size, + block_num, + block_data_addr, + block_data_size, + ) + return self._process_cmd(cmd_packet).status == StatusCode.SUCCESS + + def tp_hsm_enc_sign( + self, + key_blob_input_addr: int, + key_blob_input_size: int, + block_data_input_addr: int, + block_data_input_size: int, + signature_output_addr: int, + signature_output_size: int, + ) -> Optional[int]: + """Signs the given data. 
+ + :param key_blob_input_addr: The input buffer address where signing key blob locates at + :param key_blob_input_size: The byte count of the signing key blob + :param block_data_input_addr: The input buffer address where the data locates at + :param block_data_input_size: The byte count of the data + :param signature_output_addr: The output buffer address where ROM writes the signature to + :param signature_output_size: The output buffer size in byte + :return: Return signature size; None in case of an failure + """ + logger.info("CMD: [TrustProvisioning] HSM ENC SIGN") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvOperation.HSM_ENC_SIGN.tag, + key_blob_input_addr, + key_blob_input_size, + block_data_input_addr, + block_data_input_size, + signature_output_addr, + signature_output_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values[0] + return None + + def wpc_get_id( + self, + wpc_id_blob_addr: int, + wpc_id_blob_size: int, + ) -> Optional[int]: + """Command used for harvesting device ID blob. + + :param wpc_id_blob_addr: Buffer address + :param wpc_id_blob_size: Buffer size + """ + logger.info("CMD: [TrustProvisioning] WPC GET ID") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvWpc.WPC_GET_ID.tag, + wpc_id_blob_addr, + wpc_id_blob_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values[0] + return None + + def nxp_get_id( + self, + id_blob_addr: int, + id_blob_size: int, + ) -> Optional[int]: + """Command used for harvesting device ID blob during wafer test as part of RTS flow. + + :param id_blob_addr: address of ID blob defined by Round-trip trust provisioning specification. + :param id_blob_size: length of buffer in bytes + """ + logger.info("CMD: [TrustProvisioning] NXP GET ID") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvWpc.NXP_GET_ID.tag, + id_blob_addr, + id_blob_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values[0] + return None + + def wpc_insert_cert( + self, + wpc_cert_addr: int, + wpc_cert_len: int, + ec_id_offset: int, + wpc_puk_offset: int, + ) -> Optional[int]: + """Command used for certificate validation before it is written into flash. + + This command does following things: + Extracts ECID and WPC PUK from certificate + Validates ECID and WPC PUK. If both are OK it returns success. Otherwise returns fail + + :param wpc_cert_addr: address of inserted certificate + :param wpc_cert_len: length in bytes of inserted certificate + :param ec_id_offset: offset to 72-bit ECID + :param wpc_puk_offset: WPC PUK offset from beginning of inserted certificate + """ + logger.info("CMD: [TrustProvisioning] WPC INSERT CERT") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvWpc.WPC_INSERT_CERT.tag, + wpc_cert_addr, + wpc_cert_len, + ec_id_offset, + wpc_puk_offset, + ) + cmd_response = self._process_cmd(cmd_packet) + if cmd_response.status == StatusCode.SUCCESS: + return 0 + return None + + def wpc_sign_csr( + self, + csr_tbs_addr: int, + csr_tbs_len: int, + signature_addr: int, + signature_len: int, + ) -> Optional[int]: + """Command used sign CSR data (TBS portion). 
+ + :param csr_tbs_addr: address of CSR-TBS data + :param csr_tbs_len: length in bytes of CSR-TBS data + :param signature_addr: address where to store signature + :param signature_len: expected length of signature + :return: actual signature length + """ + logger.info("CMD: [TrustProvisioning] WPC SIGN CSR-TBS DATA") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvWpc.WPC_SIGN_CSR.tag, + csr_tbs_addr, + csr_tbs_len, + signature_addr, + signature_len, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values[0] + return None + + def dsc_hsm_create_session( + self, + oem_seed_input_addr: int, + oem_seed_input_size: int, + oem_share_output_addr: int, + oem_share_output_size: int, + ) -> Optional[int]: + """Command used by OEM to provide it share to create the initial trust provisioning keys. + + :param oem_seed_input_addr: address of 128-bit entropy seed value provided by the OEM. + :param oem_seed_input_size: OEM seed size in bytes + :param oem_share_output_addr: A 128-bit encrypted token. + :param oem_share_output_size: size in bytes + """ + logger.info("CMD: [TrustProvisioning] DSC HSM CREATE SESSION") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvDevHsmDsc.DSC_HSM_CREATE_SESSION.tag, + oem_seed_input_addr, + oem_seed_input_size, + oem_share_output_addr, + oem_share_output_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values[0] + return None + + def dsc_hsm_enc_blk( + self, + sbx_header_input_addr: int, + sbx_header_input_size: int, + block_num: int, + block_data_addr: int, + block_data_size: int, + ) -> Optional[int]: + """Command used to encrypt the given block sliced by the nxpimage. + + This command is only supported after issuance of dsc_hsm_create_session. + + :param sbx_header_input_addr: SBx header containing file size, Firmware version and Timestamp data. + Except for hash digest of block 0, all other fields should be valid. + :param sbx_header_input_size: size of the header in bytes + :param block_num: Number of block + :param block_data_addr: Address of data block + :param block_data_size: Size of data block + """ + logger.info("CMD: [TrustProvisioning] DSC HSM ENC BLK") + cmd_packet = CmdPacket( + CommandTag.TRUST_PROVISIONING, + CommandFlag.NONE.tag, + TrustProvDevHsmDsc.DSC_HSM_ENC_BLK.tag, + sbx_header_input_addr, + sbx_header_input_size, + block_num, + block_data_addr, + block_data_size, + ) + cmd_response = self._process_cmd(cmd_packet) + if isinstance(cmd_response, TrustProvisioningResponse): + return cmd_response.values[0] + return None + + def dsc_hsm_enc_sign( + self, + block_data_input_addr: int, + block_data_input_size: int, + signature_output_addr: int, + signature_output_size: int, + ) -> Optional[int]: + """Command used for signing the data buffer provided. + + This command is only supported after issuance of dsc_hsm_create_session. 
+
+        :param block_data_input_addr: Address of data buffer to be signed
+        :param block_data_input_size: Size of data buffer in bytes
+        :param signature_output_addr: Address to output signature data
+        :param signature_output_size: Size of the output signature data in bytes
+        """
+        logger.info("CMD: [TrustProvisioning] DSC HSM ENC SIGN")
+        cmd_packet = CmdPacket(
+            CommandTag.TRUST_PROVISIONING,
+            CommandFlag.NONE.tag,
+            TrustProvDevHsmDsc.DSC_HSM_ENC_SIGN.tag,
+            block_data_input_addr,
+            block_data_input_size,
+            signature_output_addr,
+            signature_output_size,
+        )
+        cmd_response = self._process_cmd(cmd_packet)
+        if isinstance(cmd_response, TrustProvisioningResponse):
+            return cmd_response.values[0]
+        return None
+
+
+####################
+# Helper functions #
+####################
+
+
+def _tp_sentinel_frame(
+    command: int, args: List[int], tag: int = 0x17, version: int = 0
+) -> bytes:
+    """Prepare frame used by sentinel."""
+    data = struct.pack("<4B", command, len(args), version, tag)
+    for item in args:
+        data += struct.pack("<I", item)
+    return data
+
+
+def _clamp_down_memory_id(memory_id: int) -> int:
+    if memory_id > 255 or memory_id == 0:
+        return memory_id
+    logger.warning(
+        "Note: memoryId is not required when accessing mapped external memory"
+    )
+    return 0
diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/memories.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/memories.py
new file mode 100644
index 00000000..78cd28b2
--- /dev/null
+++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/memories.py
@@ -0,0 +1,240 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 -*-
+#
+# Copyright 2016-2018 Martin Olejar
+# Copyright 2019-2024 NXP
+#
+# SPDX-License-Identifier: BSD-3-Clause
+
+"""Various types of memory identifiers used in the MBoot module."""
+
+from typing import List, Optional, cast
+
+from ..utils.misc import size_fmt
+from ..utils.spsdk_enum import SpsdkEnum
+
+LEGACY_MEM_ID = {
+    "internal": "INTERNAL",
+    "qspi": "QSPI",
+    "fuse": "FUSE",
+    "ifr": "IFR0",
+    "semcnor": "SEMC_NOR",
+    "flexspinor": "FLEX-SPI-NOR",
+    "semcnand": "SEMC-NAND",
+    "spinand": "SPI-NAND",
+    "spieeprom": "SPI-MEM",
+    "i2ceeprom": "I2C-MEM",
+    "sdcard": "SD",
+    "mmccard": "MMC",
+}
+
+
+########################################################################################################################
+# McuBoot External Memory ID
+########################################################################################################################
+class MemIdEnum(SpsdkEnum):
+    """McuBoot Memory Base class."""
+
+    @classmethod
+    def get_legacy_str(cls, key: str) -> Optional[int]:
+        """Converts legacy str to new enum key.
+
+        :param key: str value of legacy enum
+        :return: new enum value
+        """
+        new_key = LEGACY_MEM_ID.get(key)
+        return cls.get_tag(new_key) if new_key else None
+
+    @classmethod
+    def get_legacy_int(cls, key: int) -> Optional[str]:
+        """Converts legacy int to new enum key.
+ + :param key: int value of legacy enum + :return: new enum value + """ + if isinstance(key, int): + new_value = cls.from_tag(key) + if new_value: + return [k for k, v in LEGACY_MEM_ID.items() if v == new_value.label][0] + + return None + + +class ExtMemId(MemIdEnum): + """McuBoot External Memory Property Tags.""" + + QUAD_SPI0 = (1, "QSPI", "Quad SPI Memory 0") + IFR = (4, "IFR0", "Nonvolatile information register 0 (only used by SB loader)") + FUSE = (4, "FUSE", "Nonvolatile information register 0 (only used by SB loader)") + SEMC_NOR = (8, "SEMC-NOR", "SEMC NOR Memory") + FLEX_SPI_NOR = (9, "FLEX-SPI-NOR", "Flex SPI NOR Memory") + SPIFI_NOR = (10, "SPIFI-NOR", "SPIFI NOR Memory") + FLASH_EXEC_ONLY = (16, "FLASH-EXEC", "Execute-Only region on internal Flash") + SEMC_NAND = (256, "SEMC-NAND", "SEMC NAND Memory") + SPI_NAND = (257, "SPI-NAND", "SPI NAND Memory") + SPI_NOR_EEPROM = (272, "SPI-MEM", "SPI NOR/EEPROM Memory") + I2C_NOR_EEPROM = (273, "I2C-MEM", "I2C NOR/EEPROM Memory") + SD_CARD = (288, "SD", "eSD/SD/SDHC/SDXC Memory Card") + MMC_CARD = (289, "MMC", "MMC/eMMC Memory Card") + + +class MemId(MemIdEnum): + """McuBoot Internal/External Memory Property Tags.""" + + INTERNAL_MEMORY = ( + 0, + "RAM/FLASH", + "Internal RAM/FLASH (Used for the PRINCE configuration)", + ) + QUAD_SPI0 = (1, "QSPI", "Quad SPI Memory 0") + IFR = (4, "IFR0", "Nonvolatile information register 0 (only used by SB loader)") + FUSE = (4, "FUSE", "Nonvolatile information register 0 (only used by SB loader)") + SEMC_NOR = (8, "SEMC-NOR", "SEMC NOR Memory") + FLEX_SPI_NOR = (9, "FLEX-SPI-NOR", "Flex SPI NOR Memory") + SPIFI_NOR = (10, "SPIFI-NOR", "SPIFI NOR Memory") + FLASH_EXEC_ONLY = (16, "FLASH-EXEC", "Execute-Only region on internal Flash") + SEMC_NAND = (256, "SEMC-NAND", "SEMC NAND Memory") + SPI_NAND = (257, "SPI-NAND", "SPI NAND Memory") + SPI_NOR_EEPROM = (272, "SPI-MEM", "SPI NOR/EEPROM Memory") + I2C_NOR_EEPROM = (273, "I2C-MEM", "I2C NOR/EEPROM Memory") + SD_CARD = (288, "SD", "eSD/SD/SDHC/SDXC Memory Card") + MMC_CARD = (289, "MMC", "MMC/eMMC Memory Card") + + +######################################################################################################################## +# McuBoot External Memory Property Tags +######################################################################################################################## + + +class ExtMemPropTags(SpsdkEnum): + """McuBoot External Memory Property Tags.""" + + INIT_STATUS = (0x00000000, "INIT_STATUS") + START_ADDRESS = (0x00000001, "START_ADDRESS") + SIZE_IN_KBYTES = (0x00000002, "SIZE_IN_KBYTES") + PAGE_SIZE = (0x00000004, "PAGE_SIZE") + SECTOR_SIZE = (0x00000008, "SECTOR_SIZE") + BLOCK_SIZE = (0x00000010, "BLOCK_SIZE") + + +class MemoryRegion: + """Base class for memory regions.""" + + def __init__(self, start: int, end: int) -> None: + """Initialize the memory region object. + + :param start: start address of region + :param end: end address of region + + """ + self.start = start + self.end = end + self.size = end - start + 1 + + def __repr__(self) -> str: + return f"Memory region, start: {hex(self.start)}" + + def __str__(self) -> str: + return ( + f"0x{self.start:08X} - 0x{self.end:08X}; Total Size: {size_fmt(self.size)}" + ) + + +class RamRegion(MemoryRegion): + """RAM memory regions.""" + + def __init__(self, index: int, start: int, size: int) -> None: + """Initialize the RAM memory region object. 
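The legacy-name mapping and the region helper in `memories.py` can be exercised without hardware. A minimal sketch, assuming the module is importable under the path shown in the diff header and that `size_fmt` renders kibibytes as elsewhere in this patch:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.memories import (
    MemId,
    MemoryRegion,
)

# Legacy memory names map onto the new enum tags and back again.
assert MemId.get_legacy_str("flexspinor") == MemId.FLEX_SPI_NOR.tag  # 9
assert MemId.get_legacy_int(288) == "sdcard"

# MemoryRegion derives its size from the inclusive start/end addresses.
region = MemoryRegion(start=0x0000_0000, end=0x0009_FFFF)
print(region)  # expected: "0x00000000 - 0x0009FFFF; Total Size: 640.0 kiB"
```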
+ + :param index: number of region + :param start: start address of region + :param size: size of region + + """ + super().__init__(start, start + size - 1) + self.index = index + + def __repr__(self) -> str: + return f"RAM Memory region, start: {hex(self.start)}" + + def __str__(self) -> str: + return f"Region {self.index}: {super().__str__()}" + + +class FlashRegion(MemoryRegion): + """Flash memory regions.""" + + def __init__(self, index: int, start: int, size: int, sector_size: int) -> None: + """Initialize the Flash memory region object. + + :param index: number of region + :param start: start address of region + :param size: size of region + :param sector_size: size of sector + + """ + super().__init__(start, start + size - 1) + self.index = index + self.sector_size = sector_size + + def __repr__(self) -> str: + return f"Flash Memory region, start: {hex(self.start)}" + + def __str__(self) -> str: + msg = f"Region {self.index}: {super().__str__()} Sector size: {size_fmt(self.sector_size)}" + return msg + + +class ExtMemRegion(MemoryRegion): + """External memory regions.""" + + def __init__(self, mem_id: int, raw_values: Optional[List[int]] = None) -> None: + """Initialize the external memory region object. + + :param mem_id: ID of the external memory + :param raw_values: List of integers representing the property + + """ + self.mem_id = mem_id + if not raw_values: + self.value = None + return + super().__init__(0, 0) + self.start_address = ( + raw_values[1] if raw_values[0] & ExtMemPropTags.START_ADDRESS.tag else None + ) + self.total_size = ( + raw_values[2] * 1024 + if raw_values[0] & ExtMemPropTags.SIZE_IN_KBYTES.tag + else None + ) + self.page_size = ( + raw_values[3] if raw_values[0] & ExtMemPropTags.PAGE_SIZE.tag else None + ) + self.sector_size = ( + raw_values[4] if raw_values[0] & ExtMemPropTags.SECTOR_SIZE.tag else None + ) + self.block_size = ( + raw_values[5] if raw_values[0] & ExtMemPropTags.BLOCK_SIZE.tag else None + ) + self.value = raw_values[0] + + @property + def name(self) -> str: + """Get the name of external memory for given memory ID.""" + return ExtMemId.get_label(self.mem_id) + + def __repr__(self) -> str: + return f"EXT Memory region, name: {self.name}, start: {hex(self.start)}" + + def __str__(self) -> str: + if not self.value: + return "Not Configured" + info = f"Start Address = 0x{self.start_address:08X} " + if self.total_size: + info += f"Total Size = {size_fmt(self.total_size)} " + info += f"Page Size = {self.page_size} " + info += f"Sector Size = {self.sector_size} " + if self.block_size: + info += f"Block Size = {self.block_size} " + return info diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/properties.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/properties.py new file mode 100644 index 00000000..8bf30890 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/properties.py @@ -0,0 +1,866 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2016-2018 Martin Olejar +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Helper module for more human-friendly interpretation of the target device properties.""" + + +import ctypes +from copy import deepcopy +from typing import Callable, Dict, List, Optional, Tuple, Type, Union + +from ..exceptions import SPSDKKeyError +from ..mboot.exceptions import McuBootError +from ..utils.misc import Endianness +from ..utils.spsdk_enum import SpsdkEnum +from .commands import CommandTag +from .error_codes import StatusCode +from .memories import 
ExtMemPropTags, MemoryRegion
+
+
+########################################################################################################################
+# McuBoot helper functions
+########################################################################################################################
+def size_fmt(value: Union[int, float], kibibyte: bool = True) -> str:
+    """Convert size value into string format.
+
+    :param value: The raw value
+    :param kibibyte: True if 1024 Bytes represent 1kB or False if 1000 Bytes represent 1kB
+    :return: Stringified value
+    """
+    base, suffix = [(1000.0, "B"), (1024.0, "iB")][kibibyte]
+    x = "B"
+    for x in ["B"] + [prefix + suffix for prefix in list("kMGTP")]:
+        if -base < value < base:
+            break
+        value /= base
+
+    return f"{value} {x}" if x == "B" else f"{value:3.1f} {x}"
+
+
+########################################################################################################################
+# McuBoot helper classes
+########################################################################################################################
+
+
+class Version:
+    """McuBoot current and target version type."""
+
+    def __init__(self, *args: Union[str, int], **kwargs: int):
+        """Initialize the Version object.
+
+        :raises McuBootError: If the argument is neither str nor int
+        """
+        self.mark = kwargs.get("mark", "K")
+        self.major = kwargs.get("major", 0)
+        self.minor = kwargs.get("minor", 0)
+        self.fixation = kwargs.get("fixation", 0)
+        if args:
+            if isinstance(args[0], int):
+                self.from_int(args[0])
+            elif isinstance(args[0], str):
+                self.from_str(args[0])
+            else:
+                raise McuBootError("Value must be 'str' or 'int' type !")
+
+    def __eq__(self, obj: object) -> bool:
+        return isinstance(obj, Version) and vars(obj) == vars(self)
+
+    def __ne__(self, obj: object) -> bool:
+        return not self.__eq__(obj)
+
+    def __lt__(self, obj: "Version") -> bool:
+        return self.to_int(True) < obj.to_int(True)
+
+    def __le__(self, obj: "Version") -> bool:
+        return self.to_int(True) <= obj.to_int(True)
+
+    def __gt__(self, obj: "Version") -> bool:
+        return self.to_int(True) > obj.to_int(True)
+
+    def __ge__(self, obj: "Version") -> bool:
+        return self.to_int(True) >= obj.to_int(True)
+
+    def __repr__(self) -> str:
+        return f"<Version(mark={self.mark}, major={self.major}, minor={self.minor}, fixation={self.fixation})>"
+
+    def __str__(self) -> str:
+        return self.to_str()
+
+    def from_int(self, value: int) -> None:
+        """Parse version data from raw int value.
+
+        :param value: Raw integer input
+        """
+        mark = (value >> 24) & 0xFF
+        self.mark = chr(mark) if 64 < mark < 91 else None  # type: ignore
+        self.major = (value >> 16) & 0xFF
+        self.minor = (value >> 8) & 0xFF
+        self.fixation = value & 0xFF
+
+    def from_str(self, value: str) -> None:
+        """Parse version data from string value.
+
+        :param value: String representation input
+        """
+        mark_major, minor, fixation = value.split(".")
+        if len(mark_major) > 1 and mark_major[0] not in "0123456789":
+            self.mark = mark_major[0]
+            self.major = int(mark_major[1:])
+        else:
+            self.major = int(mark_major)
+        self.minor = int(minor)
+        self.fixation = int(fixation)
+
+    def to_int(self, no_mark: bool = False) -> int:
+        """Get version value in raw integer format.
+
+        :param no_mark: If True, return value without mark
+        :return: Integer representation
+        """
+        value = self.major << 16 | self.minor << 8 | self.fixation
+        mark = 0 if no_mark or self.mark is None else ord(self.mark) << 24  # type: ignore
+        return value | mark
+
+    def to_str(self, no_mark: bool = False) -> str:
+        """Get version value in readable string format.
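A quick, hardware-free illustration of the `Version` helper defined above (import path as in the diff header):

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.properties import Version

v = Version("K3.1.0")
assert (v.mark, v.major, v.minor, v.fixation) == ("K", 3, 1, 0)
assert v.to_int() == 0x4B030100           # mark 'K' occupies the top byte
assert v.to_int(no_mark=True) == 0x030100
assert Version(0x4B020000) < v            # comparisons ignore the mark
print(v)                                  # K3.1.0
```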
+ + :param no_mark: If True, return value without mark + :return: String representation + """ + value = f"{self.major}.{self.minor}.{self.fixation}" + mark = "" if no_mark or self.mark is None else self.mark + return f"{mark}{value}" + + +######################################################################################################################## +# McuBoot Properties +######################################################################################################################## + +# fmt: off +class PropertyTag(SpsdkEnum): + """McuBoot Properties.""" + + LIST_PROPERTIES = (0x00, 'ListProperties', 'List Properties') + CURRENT_VERSION = (0x01, "CurrentVersion", "Current Version") + AVAILABLE_PERIPHERALS = (0x02, "AvailablePeripherals", "Available Peripherals") + FLASH_START_ADDRESS = (0x03, "FlashStartAddress", "Flash Start Address") + FLASH_SIZE = (0x04, "FlashSize", "Flash Size") + FLASH_SECTOR_SIZE = (0x05, "FlashSectorSize", "Flash Sector Size") + FLASH_BLOCK_COUNT = (0x06, "FlashBlockCount", "Flash Block Count") + AVAILABLE_COMMANDS = (0x07, "AvailableCommands", "Available Commands") + CRC_CHECK_STATUS = (0x08, "CrcCheckStatus", "CRC Check Status") + LAST_ERROR = (0x09, "LastError", "Last Error Value") + VERIFY_WRITES = (0x0A, "VerifyWrites", "Verify Writes") + MAX_PACKET_SIZE = (0x0B, "MaxPacketSize", "Max Packet Size") + RESERVED_REGIONS = (0x0C, "ReservedRegions", "Reserved Regions") + VALIDATE_REGIONS = (0x0D, "ValidateRegions", "Validate Regions") + RAM_START_ADDRESS = (0x0E, "RamStartAddress", "RAM Start Address") + RAM_SIZE = (0x0F, "RamSize", "RAM Size") + SYSTEM_DEVICE_IDENT = (0x10, "SystemDeviceIdent", "System Device Identification") + FLASH_SECURITY_STATE = (0x11, "FlashSecurityState", "Security State") + UNIQUE_DEVICE_IDENT = (0x12, "UniqueDeviceIdent", "Unique Device Identification") + FLASH_FAC_SUPPORT = (0x13, "FlashFacSupport", "Flash Fac. 
Support") + FLASH_ACCESS_SEGMENT_SIZE = (0x14, "FlashAccessSegmentSize", "Flash Access Segment Size",) + FLASH_ACCESS_SEGMENT_COUNT = (0x15, "FlashAccessSegmentCount", "Flash Access Segment Count",) + FLASH_READ_MARGIN = (0x16, "FlashReadMargin", "Flash Read Margin") + QSPI_INIT_STATUS = (0x17, "QspiInitStatus", "QuadSPI Initialization Status") + TARGET_VERSION = (0x18, "TargetVersion", "Target Version") + EXTERNAL_MEMORY_ATTRIBUTES = (0x19, "ExternalMemoryAttributes", "External Memory Attributes",) + RELIABLE_UPDATE_STATUS = (0x1A, "ReliableUpdateStatus", "Reliable Update Status") + FLASH_PAGE_SIZE = (0x1B, "FlashPageSize", "Flash Page Size") + IRQ_NOTIFIER_PIN = (0x1C, "IrqNotifierPin", "Irq Notifier Pin") + PFR_KEYSTORE_UPDATE_OPT = (0x1D, "PfrKeystoreUpdateOpt", "PFR Keystore Update Opt") + BYTE_WRITE_TIMEOUT_MS = (0x1E, "ByteWriteTimeoutMs", "Byte Write Timeout in ms") + FUSE_LOCKED_STATUS = (0x1F, "FuseLockedStatus", "Fuse Locked Status") + UNKNOWN = (0xFF, "Unknown", "Unknown property") + + +class PropertyTagKw45xx(SpsdkEnum): + """McuBoot Properties.""" + + VERIFY_ERASE = (0x0A, "VerifyErase", "Verify Erase") + BOOT_STATUS_REGISTER = (0x14, "BootStatusRegister", "Boot Status Register",) + FIRMWARE_VERSION = (0x15, "FirmwareVersion", "Firmware Version",) + FUSE_PROGRAM_VOLTAGE = (0x16, "FuseProgramVoltage", "Fuse Program Voltage") + +class PeripheryTag(SpsdkEnum): + """Tags representing peripherals.""" + + UART = (0x01, "UART", "UART Interface") + I2C_SLAVE = (0x02, "I2C-Slave", "I2C Slave Interface") + SPI_SLAVE = (0x04, "SPI-Slave", "SPI Slave Interface") + CAN = (0x08, "CAN", "CAN Interface") + USB_HID = (0x10, "USB-HID", "USB HID-Class Interface") + USB_CDC = (0x20, "USB-CDC", "USB CDC-Class Interface") + USB_DFU = (0x40, "USB-DFU", "USB DFU-Class Interface") + LIN = (0x80, "LIN", "LIN Interface") + + +class FlashReadMargin(SpsdkEnum): + """Scopes for flash read.""" + + NORMAL = (0, "NORMAL") + USER = (1, "USER") + FACTORY = (2, "FACTORY") + + +class PfrKeystoreUpdateOpt(SpsdkEnum): + """Options for PFR updating.""" + + KEY_PROVISIONING = (0, "KEY_PROVISIONING", "KeyProvisioning") + WRITE_MEMORY = (1, "WRITE_MEMORY", "WriteMemory") +# fmt: on + +######################################################################################################################## +# McuBoot Properties Values +######################################################################################################################## + + +class PropertyValueBase: + """Base class for property value.""" + + __slots__ = ("tag", "name", "desc") + + def __init__( + self, tag: int, name: Optional[str] = None, desc: Optional[str] = None + ) -> None: + """Initialize the base of property. + + :param tag: Property tag, see: `PropertyTag` + :param name: Optional name for the property + :param desc: Optional description for the property + """ + self.tag = tag + self.name = name or PropertyTag.get_label(tag) or "" + self.desc = desc or PropertyTag.get_description(tag, "") + + def __str__(self) -> str: + return f"{self.desc} = {self.to_str()}" + + def to_str(self) -> str: + """Stringified representation of a property. + + Derived classes should implement this function. 
+ + :return: String representation + :raises NotImplementedError: Derived class has to implement this method + """ + raise NotImplementedError("Derived class has to implement this method.") + + +class IntValue(PropertyValueBase): + """Integer-based value property.""" + + __slots__ = ( + "value", + "_fmt", + ) + + def __init__( + self, tag: int, raw_values: List[int], str_format: str = "dec" + ) -> None: + """Initialize the integer-based property object. + + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + :param str_format: Format to display the value ('dec', 'hex', 'size') + """ + super().__init__(tag) + self._fmt = str_format + self.value = raw_values[0] + + def to_int(self) -> int: + """Get the raw integer property representation.""" + return self.value + + def to_str(self) -> str: + """Get stringified property representation.""" + if self._fmt == "size": + str_value = size_fmt(self.value) + elif self._fmt == "hex": + str_value = f"0x{self.value:08X}" + elif self._fmt == "dec": + str_value = str(self.value) + elif self._fmt == "int32": + str_value = str(ctypes.c_int32(self.value).value) + else: + str_value = self._fmt.format(self.value) + return str_value + + +class BoolValue(PropertyValueBase): + """Boolean-based value property.""" + + __slots__ = ( + "value", + "_true_values", + "_false_values", + "_true_string", + "_false_string", + ) + + def __init__( + self, + tag: int, + raw_values: List[int], + true_values: Tuple[int] = (1,), + true_string: str = "YES", + false_values: Tuple[int] = (0,), + false_string: str = "NO", + ) -> None: + """Initialize the Boolean-based property object. + + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + :param true_values: Values representing 'True', defaults to (1,) + :param true_string: String representing 'True, defaults to 'YES' + :param false_values: Values representing 'False', defaults to (0,) + :param false_string: String representing 'False, defaults to 'NO' + """ + super().__init__(tag) + self._true_values = true_values + self._true_string = true_string + self._false_values = false_values + self._false_string = false_string + self.value = raw_values[0] + + def __bool__(self) -> bool: + return self.value in self._true_values + + def to_int(self) -> int: + """Get the raw integer portion of the property.""" + return self.value + + def to_str(self) -> str: + """Get stringified property representation.""" + return ( + self._true_string if self.value in self._true_values else self._false_string + ) + + +class EnumValue(PropertyValueBase): + """Enumeration value property.""" + + __slots__ = ("value", "enum", "_na_msg") + + def __init__( + self, + tag: int, + raw_values: List[int], + enum: Type[SpsdkEnum], + na_msg: str = "Unknown Item", + ) -> None: + """Initialize the enumeration-based property object. 
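The value wrappers above turn raw 32-bit property words into readable values. A standalone sketch with made-up raw words; the `FLASH_SECURITY_STATE` keyword arguments mirror the `PROPERTIES` table later in this file:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.properties import (
    BoolValue,
    IntValue,
    PropertyTag,
)

# Made-up raw words, shaped the way a device would report them.
flash_size = IntValue(PropertyTag.FLASH_SIZE.tag, raw_values=[630 * 1024], str_format="size")
print(flash_size)  # expected: "Flash Size = 630.0 kiB"

security = BoolValue(
    PropertyTag.FLASH_SECURITY_STATE.tag,
    raw_values=[0x5AA55AA5],
    true_values=(0x00000000, 0x5AA55AA5),
    true_string="UNSECURE",
    false_values=(0x00000001, 0xC33CC33C),
    false_string="SECURE",
)
assert bool(security) and security.to_str() == "UNSECURE"
```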
+ + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + :param enum: Enumeration to pick from + :param na_msg: Message to display if an item is not found in the enum + """ + super().__init__(tag) + self._na_msg = na_msg + self.enum = enum + self.value = raw_values[0] + + def to_int(self) -> int: + """Get the raw integer portion of the property.""" + return self.value + + def to_str(self) -> str: + """Get stringified property representation.""" + try: + return self.enum.get_label(self.value) + except SPSDKKeyError: + return f"{self._na_msg}: {self.value}" + + +class VersionValue(PropertyValueBase): + """Version property class.""" + + __slots__ = ("value",) + + def __init__(self, tag: int, raw_values: List[int]) -> None: + """Initialize the Version-based property object. + + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + """ + super().__init__(tag) + self.value = Version(raw_values[0]) + + def to_int(self) -> int: + """Get the raw integer portion of the property.""" + return self.value.to_int() + + def to_str(self) -> str: + """Get stringified property representation.""" + return self.value.to_str() + + +class DeviceUidValue(PropertyValueBase): + """Device UID value property.""" + + __slots__ = ("value",) + + def __init__(self, tag: int, raw_values: List[int]) -> None: + """Initialize the Version-based property object. + + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + """ + super().__init__(tag) + self.value = b"".join( + [ + int.to_bytes(val, length=4, byteorder=Endianness.LITTLE.value) + for val in raw_values + ] + ) + + def to_int(self) -> int: + """Get the raw integer portion of the property.""" + return int.from_bytes(self.value, byteorder=Endianness.BIG.value) + + def to_str(self) -> str: + """Get stringified property representation.""" + return " ".join(f"{item:02X}" for item in self.value) + + +class ReservedRegionsValue(PropertyValueBase): + """Reserver Regions property.""" + + __slots__ = ("regions",) + + def __init__(self, tag: int, raw_values: List[int]) -> None: + """Initialize the ReserverRegion-based property object. + + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + """ + super().__init__(tag) + self.regions: List[MemoryRegion] = [] + for i in range(0, len(raw_values), 2): + if raw_values[i + 1] == 0: + continue + self.regions.append(MemoryRegion(raw_values[i], raw_values[i + 1])) + + def __str__(self) -> str: + return f"{self.desc} =\n{self.to_str()}" + + def to_str(self) -> str: + """Get stringified property representation.""" + return "\n".join( + [f" Region {i}: {region}" for i, region in enumerate(self.regions)] + ) + + +class AvailablePeripheralsValue(PropertyValueBase): + """Available Peripherals property.""" + + __slots__ = ("value",) + + def __init__(self, tag: int, raw_values: List[int]) -> None: + """Initialize the AvailablePeripherals-based property object. 
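`DeviceUidValue` joins the reported words little-endian before rendering them as hex bytes, which is easy to get backwards. A short check with arbitrary raw words, assuming the `Endianness` helper maps to Python's `"little"`/`"big"` byte-order strings:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.mboot.properties import (
    DeviceUidValue,
    PropertyTag,
)

uid = DeviceUidValue(PropertyTag.UNIQUE_DEVICE_IDENT.tag, raw_values=[0x11223344, 0xAABBCCDD])
# Each 32-bit word is serialized little-endian, then printed byte by byte.
assert uid.to_str() == "44 33 22 11 DD CC BB AA"
assert uid.to_int() == int.from_bytes(bytes.fromhex("44332211DDCCBBAA"), "big")
```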
+ + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + """ + super().__init__(tag) + self.value = raw_values[0] + + def to_int(self) -> int: + """Get the raw integer portion of the property.""" + return self.value + + def to_str(self) -> str: + """Get stringified property representation.""" + return ", ".join( + [ + peripheral_tag.label + for peripheral_tag in PeripheryTag + if peripheral_tag.tag & self.value + ] + ) + + +class AvailableCommandsValue(PropertyValueBase): + """Available commands property.""" + + __slots__ = ("value",) + + @property + def tags(self) -> List[str]: + """List of tags representing Available commands.""" + return [ + cmd_tag.tag # type: ignore + for cmd_tag in CommandTag + if cmd_tag.tag > 0 and (1 << cmd_tag.tag - 1) & self.value + ] + + def __init__(self, tag: int, raw_values: List[int]) -> None: + """Initialize the AvailableCommands-based property object. + + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + """ + super().__init__(tag) + self.value = raw_values[0] + + def __contains__(self, item: int) -> bool: + return isinstance(item, int) and bool((1 << item - 1) & self.value) + + def to_str(self) -> str: + """Get stringified property representation.""" + return [ + cmd_tag.label # type: ignore + for cmd_tag in CommandTag + if cmd_tag.tag > 0 and (1 << cmd_tag.tag - 1) & self.value + ] + + +class IrqNotifierPinValue(PropertyValueBase): + """IRQ notifier pin property.""" + + __slots__ = ("value",) + + @property + def pin(self) -> int: + """Number of the pin used for reporting IRQ.""" + return self.value & 0xFF + + @property + def port(self) -> int: + """Number of the port used for reporting IRQ.""" + return (self.value >> 8) & 0xFF + + @property + def enabled(self) -> bool: + """Indicates whether IRQ reporting is enabled.""" + return bool(self.value & (1 << 32)) + + def __init__(self, tag: int, raw_values: List[int]) -> None: + """Initialize the IrqNotifierPin-based property object. + + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + """ + super().__init__(tag) + self.value = raw_values[0] + + def __bool__(self) -> bool: + return self.enabled + + def to_str(self) -> str: + """Get stringified property representation.""" + return f"IRQ Port[{self.port}], Pin[{self.pin}] is {'enabled' if self.enabled else 'disabled'}" + + +class ExternalMemoryAttributesValue(PropertyValueBase): + """Attributes for external memories.""" + + __slots__ = ( + "value", + "mem_id", + "start_address", + "total_size", + "page_size", + "sector_size", + "block_size", + ) + + def __init__(self, tag: int, raw_values: List[int], mem_id: int = 0) -> None: + """Initialize the ExternalMemoryAttributes-based property object. 
+ + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + :param mem_id: ID of the external memory + """ + super().__init__(tag) + self.mem_id = mem_id + self.start_address = ( + raw_values[1] if raw_values[0] & ExtMemPropTags.START_ADDRESS.tag else None + ) + self.total_size = ( + raw_values[2] * 1024 + if raw_values[0] & ExtMemPropTags.SIZE_IN_KBYTES.tag + else None + ) + self.page_size = ( + raw_values[3] if raw_values[0] & ExtMemPropTags.PAGE_SIZE.tag else None + ) + self.sector_size = ( + raw_values[4] if raw_values[0] & ExtMemPropTags.SECTOR_SIZE.tag else None + ) + self.block_size = ( + raw_values[5] if raw_values[0] & ExtMemPropTags.BLOCK_SIZE.tag else None + ) + self.value = raw_values[0] + + def to_str(self) -> str: + """Get stringified property representation.""" + str_values = [] + if self.start_address is not None: + str_values.append(f"Start Address: 0x{self.start_address:08X}") + if self.total_size is not None: + str_values.append(f"Total Size: {size_fmt(self.total_size)}") + if self.page_size is not None: + str_values.append(f"Page Size: {size_fmt(self.page_size)}") + if self.sector_size is not None: + str_values.append(f"Sector Size: {size_fmt(self.sector_size)}") + if self.block_size is not None: + str_values.append(f"Block Size: {size_fmt(self.block_size)}") + return ", ".join(str_values) + + +class FuseLock: + """Fuse Lock.""" + + def __init__(self, index: int, locked: bool) -> None: + """Initialize object representing information about fuse lock. + + :param index: value of OTP index + :param locked: status of the lock, true if locked + """ + self.index = index + self.locked = locked + + def __str__(self) -> str: + status = "LOCKED" if self.locked else "UNLOCKED" + return f" FUSE{(self.index):03d}: {status}\r\n" + + +class FuseLockRegister: + """Fuse Lock Register.""" + + def __init__(self, value: int, index: int, start: int = 0) -> None: + """Initialize object representing the OTP Controller Program Locked Status. + + :param value: value of the register + :param index: index of the fuse + :param start: shift to the start of the register + + """ + self.value = value + self.index = index + self.msg = "" + self.bitfields: List[FuseLock] = [] + + shift = 0 + for _ in range(start, 32): + locked = (value >> shift) & 1 + self.bitfields.append(FuseLock(index + shift, bool(locked))) + shift += 1 + + def __str__(self) -> str: + """Get stringified property representation.""" + if self.bitfields: + for bitfield in self.bitfields: + self.msg += str(bitfield) + return f"\r\n{self.msg}" + + +class FuseLockedStatus(PropertyValueBase): + """Class representing FuseLocked registers.""" + + __slots__ = ("fuses",) + + def __init__(self, tag: int, raw_values: List[int]) -> None: + """Initialize the FuseLockedStatus property object. + + :param tag: Property tag, see: `PropertyTag` + :param raw_values: List of integers representing the property + """ + super().__init__(tag) + self.fuses: List[FuseLockRegister] = [] + idx = 0 + for count, val in enumerate(raw_values): + start = 0 + if count == 0: + start = 16 + self.fuses.append(FuseLockRegister(val, idx, start)) + idx += 32 + if count == 0: + idx -= 16 + + def to_str(self) -> str: + """Get stringified property representation.""" + msg = "\r\n" + for count, register in enumerate(self.fuses): + msg += f"OTP Controller Program Locked Status {count} Register: {register}" + return msg + + def get_fuses(self) -> List[FuseLock]: + """Get list of fuses bitfield objects. 
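+
+        For instance, `[f.index for f in status.get_fuses() if f.locked]`
+        (with `status` being a parsed FuseLockedStatus) yields the indices of
+        all locked OTP words.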
+ + :return: list of FuseLockBitfield objects + """ + fuses = [] + for registers in self.fuses: + fuses.extend(registers.bitfields) + return fuses + + +######################################################################################################################## +# McuBoot property response parser +######################################################################################################################## + +PROPERTIES: Dict[PropertyTag, Dict] = { + PropertyTag.CURRENT_VERSION: {"class": VersionValue, "kwargs": {}}, + PropertyTag.AVAILABLE_PERIPHERALS: { + "class": AvailablePeripheralsValue, + "kwargs": {}, + }, + PropertyTag.FLASH_START_ADDRESS: { + "class": IntValue, + "kwargs": {"str_format": "hex"}, + }, + PropertyTag.FLASH_SIZE: {"class": IntValue, "kwargs": {"str_format": "size"}}, + PropertyTag.FLASH_SECTOR_SIZE: { + "class": IntValue, + "kwargs": {"str_format": "size"}, + }, + PropertyTag.FLASH_BLOCK_COUNT: {"class": IntValue, "kwargs": {"str_format": "dec"}}, + PropertyTag.AVAILABLE_COMMANDS: {"class": AvailableCommandsValue, "kwargs": {}}, + PropertyTag.CRC_CHECK_STATUS: { + "class": EnumValue, + "kwargs": {"enum": StatusCode, "na_msg": "Unknown CRC Status code"}, + }, + PropertyTag.VERIFY_WRITES: { + "class": BoolValue, + "kwargs": {"true_string": "ON", "false_string": "OFF"}, + }, + PropertyTag.LAST_ERROR: { + "class": EnumValue, + "kwargs": {"enum": StatusCode, "na_msg": "Unknown Error"}, + }, + PropertyTag.MAX_PACKET_SIZE: {"class": IntValue, "kwargs": {"str_format": "size"}}, + PropertyTag.RESERVED_REGIONS: {"class": ReservedRegionsValue, "kwargs": {}}, + PropertyTag.VALIDATE_REGIONS: { + "class": BoolValue, + "kwargs": {"true_string": "ON", "false_string": "OFF"}, + }, + PropertyTag.RAM_START_ADDRESS: {"class": IntValue, "kwargs": {"str_format": "hex"}}, + PropertyTag.RAM_SIZE: {"class": IntValue, "kwargs": {"str_format": "size"}}, + PropertyTag.SYSTEM_DEVICE_IDENT: { + "class": IntValue, + "kwargs": {"str_format": "hex"}, + }, + PropertyTag.FLASH_SECURITY_STATE: { + "class": BoolValue, + "kwargs": { + "true_values": (0x00000000, 0x5AA55AA5), + "true_string": "UNSECURE", + "false_values": (0x00000001, 0xC33CC33C), + "false_string": "SECURE", + }, + }, + PropertyTag.UNIQUE_DEVICE_IDENT: {"class": DeviceUidValue, "kwargs": {}}, + PropertyTag.FLASH_FAC_SUPPORT: { + "class": BoolValue, + "kwargs": {"true_string": "ON", "false_string": "OFF"}, + }, + PropertyTag.FLASH_ACCESS_SEGMENT_SIZE: { + "class": IntValue, + "kwargs": {"str_format": "size"}, + }, + PropertyTag.FLASH_ACCESS_SEGMENT_COUNT: { + "class": IntValue, + "kwargs": {"str_format": "int32"}, + }, + PropertyTag.FLASH_READ_MARGIN: { + "class": EnumValue, + "kwargs": {"enum": FlashReadMargin, "na_msg": "Unknown Margin"}, + }, + PropertyTag.QSPI_INIT_STATUS: { + "class": EnumValue, + "kwargs": {"enum": StatusCode, "na_msg": "Unknown Error"}, + }, + PropertyTag.TARGET_VERSION: {"class": VersionValue, "kwargs": {}}, + PropertyTag.EXTERNAL_MEMORY_ATTRIBUTES: { + "class": ExternalMemoryAttributesValue, + "kwargs": {"mem_id": None}, + }, + PropertyTag.RELIABLE_UPDATE_STATUS: { + "class": EnumValue, + "kwargs": {"enum": StatusCode, "na_msg": "Unknown Error"}, + }, + PropertyTag.FLASH_PAGE_SIZE: {"class": IntValue, "kwargs": {"str_format": "size"}}, + PropertyTag.IRQ_NOTIFIER_PIN: {"class": IrqNotifierPinValue, "kwargs": {}}, + PropertyTag.PFR_KEYSTORE_UPDATE_OPT: { + "class": EnumValue, + "kwargs": {"enum": PfrKeystoreUpdateOpt, "na_msg": "Unknown"}, + }, + PropertyTag.BYTE_WRITE_TIMEOUT_MS: { + "class": 
IntValue, + "kwargs": {"str_format": "dec"}, + }, + PropertyTag.FUSE_LOCKED_STATUS: { + "class": FuseLockedStatus, + "kwargs": {}, + }, +} + +PROPERTIES_KW45XX = { + PropertyTagKw45xx.VERIFY_ERASE: { + "class": BoolValue, + "kwargs": {"true_string": "ENABLE", "false_string": "DISABLE"}, + }, + PropertyTagKw45xx.BOOT_STATUS_REGISTER: { + "class": IntValue, + "kwargs": {"str_format": "int32"}, + }, + PropertyTagKw45xx.FIRMWARE_VERSION: { + "class": IntValue, + "kwargs": {"str_format": "int32"}, + }, + PropertyTagKw45xx.FUSE_PROGRAM_VOLTAGE: { + "class": BoolValue, + "kwargs": { + "true_string": "Over Drive Voltage (2.5 V)", + "false_string": "Normal Voltage (1.8 V)", + }, + }, +} + +PROPERTIES_OVERRIDE = {"kw45xx": PROPERTIES_KW45XX, "k32w1xx": PROPERTIES_KW45XX} +PROPERTY_TAG_OVERRIDE = {"kw45xx": PropertyTagKw45xx, "k32w1xx": PropertyTagKw45xx} + + +def parse_property_value( + property_tag: int, + raw_values: List[int], + ext_mem_id: Optional[int] = None, + family: Optional[str] = None, +) -> Optional[PropertyValueBase]: + """Parse the property value received from the device. + + :param property_tag: Tag representing the property + :param raw_values: Data received from the device + :param ext_mem_id: ID of the external memory used to read the property, defaults to None + :param family: supported family + :return: Object representing the property + """ + assert isinstance(property_tag, int) + assert isinstance(raw_values, list) + properties_dict = deepcopy(PROPERTIES) + if family: + properties_dict.update(PROPERTIES_OVERRIDE[family]) # type: ignore + if property_tag not in list(properties_dict.keys()): + return None + property_value = next( + value for key, value in properties_dict.items() if key.tag == property_tag + ) + cls: Callable = property_value["class"] + kwargs: dict = property_value["kwargs"] + if "mem_id" in kwargs: + kwargs["mem_id"] = ext_mem_id + obj = cls(property_tag, raw_values, **kwargs) + if family: + property_tag_override = PROPERTY_TAG_OVERRIDE[family].from_tag(property_tag) + obj.name = property_tag_override.label + obj.desc = property_tag_override.description + return obj diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/__init__.py new file mode 100644 index 00000000..de8a3212 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Mboot Protocols.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/base.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/base.py new file mode 100644 index 00000000..97330692 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/base.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""MBoot protocol base.""" +from ...utils.interfaces.protocol.protocol_base import ProtocolBase + + +class MbootProtocolBase(ProtocolBase): + """MBoot protocol base class.""" + + allow_abort: bool = False + need_data_split: bool = True diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/bulk_protocol.py b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/bulk_protocol.py new file mode 100644 index 00000000..617d0f50 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/mboot/protocol/bulk_protocol.py @@ -0,0 +1,118 
@@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2023-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Mboot bulk implementation.""" +import logging +from struct import pack, unpack_from +from typing import Optional, Union + +from ...exceptions import SPSDKAttributeError +from ...mboot.commands import CmdResponse, parse_cmd_response +from ...mboot.exceptions import McuBootConnectionError, McuBootDataAbortError +from ...mboot.protocol.base import MbootProtocolBase +from ...utils.exceptions import SPSDKTimeoutError +from ...utils.interfaces.commands import CmdPacketBase +from ...utils.spsdk_enum import SpsdkEnum + + +class ReportId(SpsdkEnum): + """Report ID enum.""" + + CMD_OUT = (0x01, "CMD_OUT") + CMD_IN = (0x03, "CMD_IN") + DATA_OUT = (0x02, "DATA_OUT") + DATA_IN = (0x04, "DATA_IN") + + +logger = logging.getLogger(__name__) + + +class MbootBulkProtocol(MbootProtocolBase): + """Mboot Bulk protocol.""" + + def open(self) -> None: + """Open the interface.""" + self.device.open() + + def close(self) -> None: + """Close the interface.""" + self.device.close() + + @property + def is_opened(self) -> bool: + """Indicates whether interface is open.""" + return self.device.is_opened + + def write_data(self, data: bytes) -> None: + """Encapsulate data into frames and send them to device. + + :param data: Data to be sent + """ + frame = self._create_frame(data, ReportId.DATA_OUT) + if self.allow_abort: + try: + abort_data = self.device.read(1024, timeout=10) + logger.debug(f"Read {len(abort_data)} bytes of abort data") + except Exception as e: + raise McuBootConnectionError(str(e)) from e + if abort_data: + logger.debug(f"{', '.join(f'{b:02X}' for b in abort_data)}") + raise McuBootDataAbortError() + self.device.write(frame) + + def write_command(self, packet: CmdPacketBase) -> None: + """Encapsulate command into frames and send them to device. + + :param packet: Command packet object to be sent + :raises SPSDKAttributeError: Command packed contains no data to be sent + """ + data = packet.to_bytes(padding=False) + if not data: + raise SPSDKAttributeError("Incorrect packet type") + frame = self._create_frame(data, ReportId.CMD_OUT) + self.device.write(frame) + + def read(self, length: Optional[int] = None) -> Union[CmdResponse, bytes]: + """Read data from device. + + :return: read data + :raises SPSDKTimeoutError: Timeout occurred + """ + data = self.device.read(1024) + if not data: + logger.error("Cannot read from HID device") + raise SPSDKTimeoutError() + return self._parse_frame(bytes(data)) + + def _create_frame(self, data: bytes, report_id: ReportId) -> bytes: + """Encode the USB packet. + + :param report_id: ID of the report (see: HID_REPORT) + :param data: Data to send + :return: Encoded bytes and length of the final report frame + """ + raw_data = pack("<2BH", report_id.tag, 0x00, len(data)) + raw_data += data + logger.debug(f"OUT[{len(raw_data)}]: {', '.join(f'{b:02X}' for b in raw_data)}") + return raw_data + + @staticmethod + def _parse_frame(raw_data: bytes) -> Union[CmdResponse, bytes]: + """Decodes the data read on USB interface. 
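+
+        Each frame starts with a 4-byte header packed as `<2BH`: the report ID,
+        a padding byte and the 16-bit payload length. A zero payload length is
+        interpreted as a data abort by the target.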
+ + :param raw_data: Data received + :return: CmdResponse object or data read + :raises McuBootDataAbortError: Transaction aborted by target + """ + logger.debug(f"IN [{len(raw_data)}]: {', '.join(f'{b:02X}' for b in raw_data)}") + report_id, _, plen = unpack_from("<2BH", raw_data) + if plen == 0: + raise McuBootDataAbortError() + data = raw_data[4 : 4 + plen] + if report_id == ReportId.CMD_IN: + return parse_cmd_response(data) + return data diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/misc.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/misc.py new file mode 100644 index 00000000..09046b63 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/misc.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2020-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Miscellaneous functions in SBFile module.""" + +from datetime import datetime, timezone +from typing import Any, Sequence, Union + +from ..exceptions import SPSDKError +from ..utils import misc + + +class SecBootBlckSize: + """Helper methods allowing to convert size to number of blocks and back. + + Note: The class is not intended to be instantiated + """ + + # Size of cipher block in bytes + BLOCK_SIZE = 16 + + @staticmethod + def is_aligned(size: int) -> bool: + """Whether size is aligned to cipher block size. + + :param size: given size in bytes + :return: True if yes, False otherwise + """ + return size % SecBootBlckSize.BLOCK_SIZE == 0 + + @staticmethod + def align(size: int) -> int: + """Align given size to block size. + + :param size: in bytes + :return: size aligned up to block size + """ + return misc.align(size, SecBootBlckSize.BLOCK_SIZE) + + @staticmethod + def to_num_blocks(size: int) -> int: + """Converts size to number of cipher blocks. + + :param size: to be converted, the size must be aligned to block boundary + :return: corresponding number of cipher blocks + :raises SPSDKError: Raised when size is not aligned to block boundary + """ + if not SecBootBlckSize.is_aligned(size): + raise SPSDKError( + f"Invalid size {size}, expected number aligned to BLOCK size {SecBootBlckSize.BLOCK_SIZE}" + ) + return size // SecBootBlckSize.BLOCK_SIZE + + @staticmethod + def align_block_fill_random(data: bytes) -> bytes: + """Align block size to cipher block size. + + :param data: to be aligned + :return: data aligned to cipher block size, filled with random values + """ + return misc.align_block_fill_random(data, SecBootBlckSize.BLOCK_SIZE) + + +# the type represents input formats for BcdVersion3 value, see BcdVersion3.to_version +BcdVersion3Format = Union["BcdVersion3", str] + + +class BcdVersion3: + """Version in format #.#.#, where # is BCD number (1-4 digits).""" + + # default value + DEFAULT = "999.999.999" + + @staticmethod + def _check_number(num: int) -> bool: + """Check given number is a valid version number. + + :param num: to be checked + :return: True if number format is valid + :raises SPSDKError: If number format is not valid + """ + if num < 0 or num > 0x9999: + raise SPSDKError("Invalid number range") + for index in range(4): + if (num >> 4 * index) & 0xF > 0x9: + raise SPSDKError("Invalid number, contains digit > 9") + return True + + @staticmethod + def _num_from_str(text: str) -> int: + """Converts BCD number from text to int. 
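+
+        For example, the text "999" is stored as the integer 0x999, i.e. every
+        decimal digit occupies one BCD nibble.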
+ + :param text: given string to be converted to a version number + :return: version number + :raises SPSDKError: If format is not valid + """ + if len(text) < 0 or len(text) > 4: + raise SPSDKError("Invalid text length") + result = int(text, 16) + BcdVersion3._check_number(result) + return result + + @staticmethod + def from_str(text: str) -> "BcdVersion3": + """Convert string to BcdVersion instance. + + :param text: version in format #.#.#, where # is 1-4 decimal digits + :return: BcdVersion3 instance + :raises SPSDKError: If format is not valid + """ + parts = text.split(".") + if len(parts) != 3: + raise SPSDKError("Invalid length") + major = BcdVersion3._num_from_str(parts[0]) + minor = BcdVersion3._num_from_str(parts[1]) + service = BcdVersion3._num_from_str(parts[2]) + return BcdVersion3(major, minor, service) + + @staticmethod + def to_version(input_version: BcdVersion3Format) -> "BcdVersion3": + """Convert different input formats into BcdVersion3 instance. + + :param input_version: either directly BcdVersion3 or string + :raises SPSDKError: Raises when the format is unsupported + :return: BcdVersion3 instance + """ + if isinstance(input_version, BcdVersion3): + return input_version + if isinstance(input_version, str): + return BcdVersion3.from_str(input_version) + raise SPSDKError("unsupported format") + + def __init__(self, major: int = 1, minor: int = 0, service: int = 0): + """Initialize BcdVersion3. + + :param major: number in BCD format, 1-4 decimal digits + :param minor: number in BCD format, 1-4 decimal digits + :param service: number in BCD format, 1-4 decimal digits + :raises SPSDKError: Invalid version + """ + if not all( + [ + BcdVersion3._check_number(major), + BcdVersion3._check_number(minor), + BcdVersion3._check_number(service), + ] + ): + raise SPSDKError("Invalid version") + self.major = major + self.minor = minor + self.service = service + + def __str__(self) -> str: + return f"{self.major:X}.{self.minor:X}.{self.service:X}" + + def __repr__(self) -> str: + return self.__class__.__name__ + ": " + self.__str__() + + def __eq__(self, other: Any) -> bool: + return ( + isinstance(other, BcdVersion3) + and (self.major == other.major) + and (self.minor == other.minor) + and (self.service == other.service) + ) + + @property + def nums(self) -> Sequence[int]: + """Return array of version numbers: [major, minor, service].""" + return [self.major, self.minor, self.service] + + +def pack_timestamp(value: datetime) -> int: + """Converts datetime to millisecond since 1.1.2000. + + :param value: datetime to be converted + :return: number of milliseconds since 1.1.2000 00:00:00; 64-bit integer + :raises SPSDKError: When there is incorrect result of conversion + """ + assert isinstance(value, datetime) + start = datetime(2000, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc).timestamp() + result = int((value.timestamp() - start) * 1000000) + if result < 0 or result > 0xFFFFFFFFFFFFFFFF: + raise SPSDKError("Incorrect result of conversion") + return result + + +def unpack_timestamp(value: int) -> datetime: + """Converts timestamp in milliseconds into datetime. 
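+
+    The value is scaled by the same factor of 1,000,000 that `pack_timestamp`
+    applies when converting a datetime.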
+ + :param value: number of milliseconds since 1.1.2000 00:00:00; 64-bit integer + :return: corresponding datetime + :raises SPSDKError: When there is incorrect result of conversion + """ + assert isinstance(value, int) + if value < 0 or value > 0xFFFFFFFFFFFFFFFF: + raise SPSDKError("Incorrect result of conversion") + start = int( + datetime(2000, 1, 1, 0, 0, 0, 0, tzinfo=timezone.utc).timestamp() * 1000000 + ) + return datetime.fromtimestamp((start + value) / 1000000) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/__init__.py new file mode 100644 index 00000000..02d575a2 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module implementing SB2 and SB2.1 File.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/bd_ebnf_grammar.txt b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/bd_ebnf_grammar.txt new file mode 100644 index 00000000..50d8ba63 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/bd_ebnf_grammar.txt @@ -0,0 +1,174 @@ +/* BD file grammar in EBNF form */ +command_file ::= pre_section_block* section_block* + +pre_section_block ::= options_block +| constants_block +| sources_block +| keyblob_block + +options_block ::= OPTIONS '{' option_def* '}' + +option_def ::= IDENT '=' const_expr ';' + +constants_block ::= CONSTANTS '{' constant_def* '}' + +constant_def ::= IDENT '=' bool_expr ';' + +sources_block ::= SOURCES '{' source_def* '}' + +source_def ::= IDENT '=' source_value ( '(' option_list? ')' )? ';' + +source_value ::= STRING_LITERAL +| EXTERN '(' int_const_expr ')' + +option_list ::= IDENT '=' const_expr (',' IDENT '=' const_expr )* + +keyblob_block ::= KEYBLOB '(' int_const_expr ')' '{' keyblob_contents '}' + +keyblob_contents ::= '(' option_list* ')' + +section_block ::= SECTION '(' int_const_expr section_options? ')' section_contents + +section_options ::= ';' option_list? + +section_contents ::= '{' statement* '}' +| '<=' source_name ';' + +statement ::= basic_stmt ';' +| from_stmt +| if_stmt +| keywrap_stmt + +basic_stmt ::= load_stmt +| load_ifr_stmt +| call_stmt +| jump_sp_stmt +| mode_stmt +| message_stmt +| erase_stmt +| enable_stmt +| reset_stmt +| encrypt_stmt +| keystore_stmt + +load_stmt ::= LOAD load_opt load_data load_target + +load_opt ::= IDENT +| int_const_expr +| empty + +load_data ::= int_const_expr +| STRING_LITERAL +| IDENT +| section_list +| section_list FROM IDENT +| BINARY_BLOB + +load_target ::= '>' '.' +| '>' address_or_range + +section_list ::= section_ref (',' section_ref )* + +section_ref ::= '~' SECTION_NAME +| SECTION_NAME + +erase_stmt ::= ERASE address_or_range +| ERASE ALL + +address_or_range ::= int_const_expr ( '..' int_const_expr)? + +symbol_ref ::= IDENT'?' ':' IDENT + +load_ifr_stmt ::= LOAD IFR int_const_expr '>' int_const_expr + +call_stmt ::= call_type call_target call_arg? + +call_type ::= CALL +| JUMP + +call_target ::= int_const_expr +| symbol_ref +| IDENT + +call_arg ::= '(' int_const_expr? ')' + +jump_sp_stmt ::= JUMP_SP int_const_expr call_target call_arg? 
+ +from_stmt ::= FROM IDENT '{' in_from_stmt* '}' + +in_from_stmt ::= basic_stmt ';' +| if_stmt + +mode_stmt ::= MODE int_const_expr + +message_stmt ::= message_type STRING_LITERAL + +message_type ::= INFO +| WARNING +| ERROR + +keystore_stmt ::= KEYSTORE_TO_NV mem_opt address_or_range +| KEYSTORE_FROM_NV mem_opt address_or_range + +mem_opt ::= IDENT +| '@' int_const_expr +| empty + +if_stmt ::= IF bool_expr '{' statement* '}' else_stmt? + +else_stmt ::= ELSE '(' statement* ')' +| ELSE if_stmt + +keywrap_stmt ::= KEYWRAP '(' int_const_expr ')' '{' statement* '}' + +encrypt_stmt ::= ENCRYPT '(' int_const_expr ')' '{' statement* '}' + +enable_stmt ::= ENABLE AT_INT_LITERAL int_const_expr + +reset_stmt ::= RESET + +ver_check_stmt ::= VERSION_CHECK sec_or_nsec int_const_expr + +sec_or_nsec ::= SEC +| NSEC + +const_expr ::= STRING_LITERAL +| bool_expr + +int_const_expr ::= expr + +bool_expr ::= bool_expr '<' bool_expr +| bool_expr '<=' bool_expr +| bool_expr '>' bool_expr +| bool_expr '>=' bool_expr +| bool_expr '==' bool_expr +| bool_expr '!=' bool_expr +| bool_expr '&&' bool_expr +| bool_expr '||' bool_expr +| '(' bool_expr ')' +| int_const_expr +| '!' bool_expr +| DEFINED '(' IDENT ')' +| IDENT '(' source_name ')' + +expr ::= expr '+' expr +| expr '-' expr +| expr '*' expr +| expr '/' expr +| expr '%' expr +| expr '<<' expr +| expr '>>' expr +| expr '&' expr +| expr '|' expr +| expr '^' expr +| expr '.' INT_SIZE +| '(' expr ')' +| INT_LITERAL +| IDENT +| SYMBOL_REF +| unary_expr +| SIZEOF '(' SYMBOL_REF ')' +| SIZEOF '(' IDENT ')' + +unary_expr ::= '+' expr +| '-' expr diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/bd_grammer.txt b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/bd_grammer.txt new file mode 100644 index 00000000..51f8e7fd --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/bd_grammer.txt @@ -0,0 +1,210 @@ +/* BD file grammar in BNF form */ +command_file ::= pre_section_block section_block + +pre_section_block ::= pre_section_block options_block +| pre_section_block constants_block +| pre_section_block sources_block +| pre_section_block keyblob_block +| empty + +options_block ::= OPTIONS '{' option_def '}' + +option_def ::= option_def IDENT '=' const_expr ';' +| empty + +constants_block ::= CONSTANTS '{' constant_def '}' + +constant_def ::= constant_def IDENT '=' bool_expr ';' +| empty + +sources_block ::= SOURCES '{' source_def '}' + +source_def ::= source_def IDENT '=' source_value ';' +| source_def IDENT '=' source_value '(' source_attr_list ')' ';' +| empty + +source_value ::= STRING_LITERAL +| EXTERN '(' int_const_expr ')' + +source_attr_list ::= option_list +| empty + +option_list ::= IDENT '=' const_expr ',' option_list +| IDENT '=' const_expr + +keyblob_block ::= KEYBLOB '(' int_const_expr ')' '{' keyblob_contents '}' + +# ----------------------------- Original keyblob grammar ------------- +# keyblob_contents ::= keyblob_contents '(' keyblob_options_list ')' +# | empty + +# keyblob_options_list ::= keyblob_options +# | empty + +# keyblob_options ::= IDENT '=' const_expr ',' keyblob_options +# | IDENT '=' const_expr + +# ----------------------------- New keyblob grammar ------------------ +keyblob_contents ::= '(' keyblob_options ')' + +keyblob_options ::= IDENT '=' const_expr ',' keyblob_options +| IDENT '=' const_expr + +section_block ::= section_block SECTION '(' int_const_expr section_options ')' section_contents +| empty + +section_options ::= ';' option_list +| ';' +| empty + +section_contents ::= '{' statement '}' 
+| '<=' source_name ';' + +statement ::= statement basic_stmt ';' +| statement from_stmt +| statement if_stmt +| statement encrypt_block +| statement keywrap_block +| empty + +basic_stmt ::= load_stmt +| load_ifr_stmt +| call_stmt +| jump_sp_stmt +| mode_stmt +| message_stmt +| erase_stmt +| enable_stmt +| reset_stmt +| keystore_stmt + +load_stmt ::= LOAD load_opt load_data load_target + +load_opt ::= IDENT +| int_const_expr +| empty + +load_data ::= int_const_expr +| STRING_LITERAL +| SOURCE_NAME +| section_list +| section_list FROM SOURCE_NAME +| BINARY_BLOB + +load_target ::= '>' '.' +| '>' address_or_range +| empty + +section_list ::= section_list ',' section_ref +| section_ref + +section_ref ::= '~' SECTION_NAME +| SECTION_NAME + +erase_stmt ::= ERASE address_or_range +| ERASE ALL + +address_or_range ::= int_const_expr +| int_const_expr '..' int_const_expr + +symbol_ref ::= SOURCE_NAME'?' ':' IDENT + +load_ifr_stmt ::= LOAD IFR int_const_expr '>' int_const_expr + +call_stmt ::= call_type call_target call_arg + +call_type ::= CALL +| JUMP + +call_target ::= int_const_expr +| symbol_ref +| IDENT + +call_arg ::= '(' ')' +| '(' int_const_expr ')' +| empty + +jump_sp_stmt ::= JUMP_SP int_const_expr call_target call_arg + +from_stmt ::= FROM IDENT '{' in_from_stmt '}' + +in_from_stmt ::= in_from_stmt basic_stmt ';' +| in_from_stmt if_stmt +| empty + +mode_stmt ::= MODE int_const_expr + +messate_stmt ::= message_type STRING_LITERAL + +message_type ::= INFO +| WARNING +| ERROR + +keystore_stmt ::= KEYSTORE_TO_NV mem_opt address_or_range +| KEYSTORE_FROM_NV mem_opt address_or_range + +mem_opt ::= IDENT +| '@' int_const_expr +| empty + +if_stmt ::= IF bool_expr '{' statement '}' else_stmt + +else_stmt ::= ELSE '(' statement ')' +| ELSE if_stmt +| empty + +keywrap_block ::= KEYWRAP '(' int_const_expr ')' '{' LOAD BINARY_BLOB GT int_const_expr SEMICOLON '}' + +encrypt_block ::= ENCRYPT '(' int_const_expr ')' '{' load_stmt '}' + +enable_stmt ::= ENABLE AT_INT_LITERAL int_const_expr + +reset_stmt ::= RESET + +ver_check_stmt ::= VERSION_CHECK sec_or_nsec int_const_expr + +sec_or_nsec ::= SEC +| NSEC + +const_expr ::= STRING_LITERAL +| bool_expr + +int_const_expr ::= expr + +bool_expr ::= bool_expr '<' bool_expr +| bool_expr '<=' bool_expr +| bool_expr '>' bool_expr +| bool_expr '>=' bool_expr +| bool_expr '==' bool_expr +| bool_expr '!=' bool_expr +| bool_expr '&&' bool_expr +| bool_expr '||' bool_expr +| '(' bool_expr ')' +| int_const_expr +| '!' bool_expr +| DEFINED '(' IDENT ')' +| IDENT '(' source_name ')' + +expr ::= expr '+' expr +| expr '-' expr +| expr '*' expr +| expr '/' expr +| expr '%' expr +| expr '<<' expr +| expr '>>' expr +| expr '&' expr +| expr '|' expr +| expr '^' expr +| expr '.' 
INT_SIZE +| '(' expr ')' +| INT_LITERAL +| IDENT +| SYMBOL_REF +| unary_expr +| SIZEOF '(' SYMBOL_REF ')' +| SIZEOF '(' IDENT ')' + +unary_expr ::= '+' expr +| '-' expr + +empty ::= diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/commands.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/commands.py new file mode 100644 index 00000000..7ca87ae9 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/commands.py @@ -0,0 +1,1072 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Commands used by SBFile module.""" +import math +from abc import abstractmethod +from struct import calcsize, pack, unpack_from +from typing import Mapping, Optional, Type + +from crcmod.predefined import mkPredefinedCrcFun +from typing_extensions import Self + +from ...exceptions import SPSDKError +from ...mboot.memories import ExtMemId +from ...sbfile.misc import SecBootBlckSize +from ...utils.abstract import BaseClass +from ...utils.misc import Endianness +from ...utils.spsdk_enum import SpsdkEnum + +######################################################################################################################## +# Constants +######################################################################################################################## + +DEVICE_ID_MASK = 0xFF +DEVICE_ID_SHIFT = 0 +GROUP_ID_MASK = 0xF00 +GROUP_ID_SHIFT = 8 + + +######################################################################################################################## +# Enums +######################################################################################################################## +class EnumCmdTag(SpsdkEnum): + """Command tags.""" + + NOP = (0x0, "NOP") + TAG = (0x1, "TAG") + LOAD = (0x2, "LOAD") + FILL = (0x3, "FILL") + JUMP = (0x4, "JUMP") + CALL = (0x5, "CALL") + ERASE = (0x7, "ERASE") + RESET = (0x8, "RESET") + MEM_ENABLE = (0x9, "MEM_ENABLE") + PROG = (0xA, "PROG") + FW_VERSION_CHECK = (0xB, "FW_VERSION_CHECK", "Check FW version fuse value") + WR_KEYSTORE_TO_NV = ( + 0xC, + "WR_KEYSTORE_TO_NV", + "Restore key-store restore to non-volatile memory", + ) + WR_KEYSTORE_FROM_NV = ( + 0xD, + "WR_KEYSTORE_FROM_NV", + "Backup key-store from non-volatile memory", + ) + + +class EnumSectionFlag(SpsdkEnum): + """Section flags.""" + + BOOTABLE = (0x0001, "BOOTABLE") + CLEARTEXT = (0x0002, "CLEARTEXT") + LAST_SECT = (0x8000, "LAST_SECT") + + +######################################################################################################################## +# Header Class +######################################################################################################################## +class CmdHeader(BaseClass): + """SBFile command header.""" + + FORMAT = "<2BH3L" + SIZE = calcsize(FORMAT) + + @property + def crc(self) -> int: + """Calculate CRC for the header data.""" + raw_data = self._raw_data(crc=0) + checksum = 0x5A + for i in range(1, self.SIZE): + checksum = (checksum + raw_data[i]) & 0xFF + return checksum + + def __init__(self, tag: int, flags: int = 0) -> None: + """Initialize header.""" + if tag not in EnumCmdTag.tags(): + raise SPSDKError("Incorrect command tag") + self.tag = tag + self.flags = flags + self.address = 0 + self.count = 0 + self.data = 0 + + def __repr__(self) -> str: + return f"SB2 Command header, TAG:{self.tag}" + + def __str__(self) -> str: + tag = ( + EnumCmdTag.get_label(self.tag) + if self.tag in EnumCmdTag.tags() + else f"0x{self.tag:02X}" + 
) + return ( + f"tag={tag}, flags=0x{self.flags:04X}, " + f"address=0x{self.address:08X}, count=0x{self.count:08X}, data=0x{self.data:08X}" + ) + + def _raw_data(self, crc: int) -> bytes: + """Return raw data of the header with specified CRC. + + :param crc: value to be used + :return: binary representation of the header + """ + return pack( + self.FORMAT, crc, self.tag, self.flags, self.address, self.count, self.data + ) + + def export(self) -> bytes: + """Export command header as bytes.""" + return self._raw_data(self.crc) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command header from bytes. + + :param data: Input data as bytes + :return: CMDHeader object + :raises SPSDKError: raised when size is incorrect + :raises SPSDKError: Raised when CRC is incorrect + """ + if calcsize(cls.FORMAT) > len(data): + raise SPSDKError("Incorrect size") + obj = cls(EnumCmdTag.NOP.tag) + (crc, obj.tag, obj.flags, obj.address, obj.count, obj.data) = unpack_from( + cls.FORMAT, data + ) + if crc != obj.crc: + raise SPSDKError("CRC does not match") + return obj + + +######################################################################################################################## +# Commands Classes +######################################################################################################################## +class CmdBaseClass(BaseClass): + """Base class for all commands.""" + + # bit mask for device ID inside flags + ROM_MEM_DEVICE_ID_MASK = 0xFF00 + # shift for device ID inside flags + ROM_MEM_DEVICE_ID_SHIFT = 8 + # bit mask for group ID inside flags + ROM_MEM_GROUP_ID_MASK = 0xF0 + # shift for group ID inside flags + ROM_MEM_GROUP_ID_SHIFT = 4 + + def __init__(self, tag: EnumCmdTag) -> None: + """Initialize CmdBase.""" + self._header = CmdHeader(tag.tag) + + @property + def header(self) -> CmdHeader: + """Return command header.""" + return self._header + + @property + def raw_size(self) -> int: + """Return size of the command in binary format (including header).""" + return CmdHeader.SIZE # this is default implementation + + def __repr__(self) -> str: + return "Command: " + str( + self._header + ) # default implementation: use command name + + def __str__(self) -> str: + """Return text info about the instance.""" + return repr(self) + "\n" # default implementation is same as __repr__ + + def export(self) -> bytes: + """Return object serialized into bytes.""" + return self._header.export() # default implementation + + +class CmdNop(CmdBaseClass): + """Command NOP class.""" + + def __init__(self) -> None: + """Initialize Command Nop.""" + super().__init__(EnumCmdTag.NOP) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. + + :param data: Input data as bytes + :return: CMD Nop object + :raises SPSDKError: When there is incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.NOP: + raise SPSDKError("Incorrect header tag") + return cls() + + +class CmdTag(CmdBaseClass): + """Command TAG class. + + It is also used as header for boot section for SB file 1.x. + """ + + def __init__(self) -> None: + """Initialize Command Tag.""" + super().__init__(EnumCmdTag.TAG) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. 
+ + :param data: Input data as bytes + :return: parsed instance + :raises SPSDKError: When there is incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.TAG: + raise SPSDKError("Incorrect header tag") + result = cls() + result._header = header + return result + + +class CmdLoad(CmdBaseClass): + """Command Load. The load statement is used to store data into the memory.""" + + @property + def address(self) -> int: + """Return address in target processor to load data.""" + return self._header.address + + @address.setter + def address(self, value: int) -> None: + """Setter. + + :param value: address in target processor to load data + :raises SPSDKError: When there is incorrect address + """ + if value < 0x00000000 or value > 0xFFFFFFFF: + raise SPSDKError("Incorrect address") + self._header.address = value + + @property + def flags(self) -> int: + """Return command's flag.""" + return self._header.flags + + @flags.setter + def flags(self, value: int) -> None: + """Set command's flag.""" + self._header.flags = value + + @property + def raw_size(self) -> int: + """Return aligned size of the command including header and data.""" + size = CmdHeader.SIZE + len(self.data) + if size % CmdHeader.SIZE: + size += CmdHeader.SIZE - (size % CmdHeader.SIZE) + return size + + def __init__(self, address: int, data: bytes, mem_id: int = 0) -> None: + """Initialize CMD Load.""" + super().__init__(EnumCmdTag.LOAD) + assert isinstance(data, (bytes, bytearray)) + self.address = address + self.data = bytes(data) + self.mem_id = mem_id + + device_id = get_device_id(mem_id) + group_id = get_group_id(mem_id) + + self.flags |= (self.flags & ~self.ROM_MEM_DEVICE_ID_MASK) | ( + (device_id << self.ROM_MEM_DEVICE_ID_SHIFT) & self.ROM_MEM_DEVICE_ID_MASK + ) + + self.flags |= (self.flags & ~self.ROM_MEM_GROUP_ID_MASK) | ( + (group_id << self.ROM_MEM_GROUP_ID_SHIFT) & self.ROM_MEM_GROUP_ID_MASK + ) + + def __str__(self) -> str: + return ( + f"LOAD: Address=0x{self.address:08X}, DataLen={len(self.data)}, " + f"Flags=0x{self.flags:08X}, MemId=0x{self.mem_id:08X}" + ) + + def export(self) -> bytes: + """Export command as binary.""" + self._update_data() + result = super().export() + return result + self.data + + def _update_data(self) -> None: + """Update command data.""" + # padding data + self.data = SecBootBlckSize.align_block_fill_random(self.data) + # update header + self._header.count = len(self.data) + crc32_function = mkPredefinedCrcFun("crc-32-mpeg") + self._header.data = crc32_function(self.data, 0xFFFFFFFF) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. 
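+
+        The memory ID is reconstructed from the device-ID and group-ID bit
+        fields of the header flags, reversing the encoding done in `__init__`.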
+ + :param data: Input data as bytes + :return: CMD Load object + :raises SPSDKError: Raised when there is invalid CRC + :raises SPSDKError: When there is incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.LOAD: + raise SPSDKError("Incorrect header tag") + header_count = SecBootBlckSize.align(header.count) + cmd_data = data[CmdHeader.SIZE : CmdHeader.SIZE + header_count] + crc32_function = mkPredefinedCrcFun("crc-32-mpeg") + if header.data != crc32_function(cmd_data, 0xFFFFFFFF): + raise SPSDKError("Invalid CRC in the command header") + device_id = ( + header.flags & cls.ROM_MEM_DEVICE_ID_MASK + ) >> cls.ROM_MEM_DEVICE_ID_SHIFT + group_id = ( + header.flags & cls.ROM_MEM_GROUP_ID_MASK + ) >> cls.ROM_MEM_GROUP_ID_SHIFT + mem_id = get_memory_id(device_id, group_id) + obj = cls(header.address, cmd_data, mem_id) + obj.header.data = header.data + obj.header.flags = header.flags + obj._update_data() + return obj + + +class CmdFill(CmdBaseClass): + """Command Fill class.""" + + PADDING_VALUE = 0x00 + + @property + def address(self) -> int: + """Return address of the command Fill.""" + return self._header.address + + @address.setter + def address(self, value: int) -> None: + """Set address for the command Fill.""" + if value < 0x00000000 or value > 0xFFFFFFFF: + raise SPSDKError("Incorrect address") + self._header.address = value + + @property + def raw_size(self) -> int: + """Calculate raw size of header.""" + size = CmdHeader.SIZE + size += len(self._pattern) - 4 + if size % CmdHeader.SIZE: + size += CmdHeader.SIZE - (size % CmdHeader.SIZE) + return size + + def __init__( + self, address: int, pattern: int, length: Optional[int] = None + ) -> None: + """Initialize Command Fill. + + :param address: to write data + :param pattern: data to be written + :param length: length of data to be filled, defaults to 4 + :raises SPSDKError: Raised when size is not aligned to 4 bytes + """ + super().__init__(EnumCmdTag.FILL) + length = length or 4 + if length % 4: + raise SPSDKError("Length of memory range to fill must be a multiple of 4") + # if the pattern is a zero, the length is considered also as zero and the + # conversion to bytes produces empty byte "array", which is wrong, as + # zero should be converted to zero byte. Thus in case the pattern_len + # evaluates to 0, we set it to 1. + pattern_len = pattern.bit_length() / 8 or 1 + # We can get a number of 3 bytes, so we consider this as a word and set + # the length to 4 bytes with the first byte being zero. + if 3 == math.ceil(pattern_len): + pattern_len = 4 + pattern_bytes = pattern.to_bytes(math.ceil(pattern_len), Endianness.BIG.value) + # The pattern length is computed above, but as we transform the number + # into bytes, compute the len again just in case - a bit paranoid + # approach chosen. 
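+        # A 1- or 2-byte pattern is replicated below to fill a full 32-bit word,
+        # e.g. 0xAB becomes AB AB AB AB and 0xABCD becomes AB CD AB CD.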
+ if len(pattern_bytes) not in [1, 2, 4]: + raise SPSDKError("Pattern must be 1, 2 or 4 bytes long") + replicate = 4 // len(pattern_bytes) + final_pattern = replicate * pattern_bytes + self.address = address + self._pattern = final_pattern + # update header + self._header.data = unpack_from(">L", self._pattern)[0] + self._header.count = length + + @property + def pattern(self) -> bytes: + """Return binary data to fill.""" + return self._pattern + + def __str__(self) -> str: + return f"FILL: Address=0x{self.address:08X}, Pattern=" + " ".join( + f"{byte:02X}" for byte in self._pattern + ) + + def export(self) -> bytes: + """Return command in binary form (serialization).""" + # export cmd + data = super().export() + # export additional data + data = SecBootBlckSize.align_block_fill_random(data) + return data + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. + + :param data: Input data as bytes + :return: Command Fill object + :raises SPSDKError: If incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.FILL: + raise SPSDKError("Incorrect header tag") + return cls(header.address, header.data, header.count) + + +class CmdJump(CmdBaseClass): + """Command Jump class.""" + + @property + def address(self) -> int: + """Return address of the command Jump.""" + return self._header.address + + @address.setter + def address(self, value: int) -> None: + """Set address of the command Jump.""" + if value < 0x00000000 or value > 0xFFFFFFFF: + raise SPSDKError("Incorrect address") + self._header.address = value + + @property + def argument(self) -> int: + """Return command's argument.""" + return self._header.data + + @argument.setter + def argument(self, value: int) -> None: + """Set command's argument.""" + self._header.data = value + + @property + def spreg(self) -> Optional[int]: + """Return command's Stack Pointer.""" + if self._header.flags == 2: + return self._header.count + + return None + + @spreg.setter + def spreg(self, value: Optional[int] = None) -> None: + """Set command's Stack Pointer.""" + if value is None: + self._header.flags = 0 + self._header.count = 0 + else: + self._header.flags = 2 + self._header.count = value + + def __init__( + self, address: int = 0, argument: int = 0, spreg: Optional[int] = None + ) -> None: + """Initialize Command Jump.""" + super().__init__(EnumCmdTag.JUMP) + self.address = address + self.argument = argument + self.spreg = spreg + + def __str__(self) -> str: + nfo = f"JUMP: Address=0x{self.address:08X}, Argument=0x{self.argument:08X}" + if self.spreg is not None: + nfo += f", SP=0x{self.spreg:08X}" + return nfo + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. + + :param data: Input data as bytes + :return: Command Jump object + :raises SPSDKError: If incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.JUMP: + raise SPSDKError("Incorrect header tag") + return cls(header.address, header.data, header.count if header.flags else None) + + +class CmdCall(CmdBaseClass): + """Command Call. + + The call statement is used for inserting a bootloader command that executes a function + from one of the files that are loaded into the memory. 
+ """ + + @property + def address(self) -> int: + """Return command's address.""" + return self._header.address + + @address.setter + def address(self, value: int) -> None: + """Set command's address.""" + if value < 0x00000000 or value > 0xFFFFFFFF: + raise SPSDKError("Incorrect address") + self._header.address = value + + @property + def argument(self) -> int: + """Return command's argument.""" + return self._header.data + + @argument.setter + def argument(self, value: int) -> None: + """Set command's argument.""" + self._header.data = value + + def __init__(self, address: int = 0, argument: int = 0) -> None: + """Initialize Command Call.""" + super().__init__(EnumCmdTag.CALL) + self.address = address + self.argument = argument + + def __str__(self) -> str: + return f"CALL: Address=0x{self.address:08X}, Argument=0x{self.argument:08X}" + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. + + :param data: Input data as bytes + :return: Command Call object + :raises SPSDKError: If incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.CALL: + raise SPSDKError("Incorrect header tag") + return cls(header.address, header.data) + + +class CmdErase(CmdBaseClass): + """Command Erase class.""" + + @property + def address(self) -> int: + """Return command's address.""" + return self._header.address + + @address.setter + def address(self, value: int) -> None: + """Set command's address.""" + if value < 0x00000000 or value > 0xFFFFFFFF: + raise SPSDKError("Incorrect address") + self._header.address = value + + @property + def length(self) -> int: + """Return command's count.""" + return self._header.count + + @length.setter + def length(self, value: int) -> None: + """Set command's count.""" + self._header.count = value + + @property + def flags(self) -> int: + """Return command's flag.""" + return self._header.flags + + @flags.setter + def flags(self, value: int) -> None: + """Set command's flag.""" + self._header.flags = value + + def __init__( + self, address: int = 0, length: int = 0, flags: int = 0, mem_id: int = 0 + ) -> None: + """Initialize Command Erase.""" + super().__init__(EnumCmdTag.ERASE) + self.address = address + self.length = length + self.flags = flags + self.mem_id = mem_id + + device_id = get_device_id(mem_id) + group_id = get_group_id(mem_id) + + self.flags |= (self.flags & ~self.ROM_MEM_DEVICE_ID_MASK) | ( + (device_id << self.ROM_MEM_DEVICE_ID_SHIFT) & self.ROM_MEM_DEVICE_ID_MASK + ) + + self.flags |= (self.flags & ~self.ROM_MEM_GROUP_ID_MASK) | ( + (group_id << self.ROM_MEM_GROUP_ID_SHIFT) & self.ROM_MEM_GROUP_ID_MASK + ) + + def __str__(self) -> str: + return ( + f"ERASE: Address=0x{self.address:08X}, Length={self.length}, Flags=0x{self.flags:08X}, " + f"MemId=0x{self.mem_id:08X}" + ) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. 
+ + :param data: Input data as bytes + :return: Command Erase object + :raises SPSDKError: If incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.ERASE: + raise SPSDKError("Invalid header tag") + device_id = ( + header.flags & cls.ROM_MEM_DEVICE_ID_MASK + ) >> cls.ROM_MEM_DEVICE_ID_SHIFT + group_id = ( + header.flags & cls.ROM_MEM_GROUP_ID_MASK + ) >> cls.ROM_MEM_GROUP_ID_SHIFT + mem_id = get_memory_id(device_id, group_id) + return cls(header.address, header.count, header.flags, mem_id) + + +class CmdReset(CmdBaseClass): + """Command Reset class.""" + + def __init__(self) -> None: + """Initialize Command Reset.""" + super().__init__(EnumCmdTag.RESET) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. + + :param data: Input data as bytes + :return: Cmd Reset object + :raises SPSDKError: If incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.RESET: + raise SPSDKError("Invalid header tag") + return cls() + + +class CmdMemEnable(CmdBaseClass): + """Command to configure certain memory.""" + + @property + def address(self) -> int: + """Return command's address.""" + return self._header.address + + @address.setter + def address(self, value: int) -> None: + """Set command's address.""" + self._header.address = value + + @property + def size(self) -> int: + """Return command's size.""" + return self._header.count + + @size.setter + def size(self, value: int) -> None: + """Set command's size.""" + self._header.count = value + + @property + def flags(self) -> int: + """Return command's flag.""" + return self._header.flags + + @flags.setter + def flags(self, value: int) -> None: + """Set command's flag.""" + self._header.flags = value + + def __init__(self, address: int, size: int, mem_id: int): + """Initialize CmdMemEnable. + + :param address: source address with configuration data for memory initialization + :param size: size of configuration data used for memory initialization + :param mem_id: identification of memory + """ + super().__init__(EnumCmdTag.MEM_ENABLE) + self.address = address + self.mem_id = mem_id + self.size = size + + device_id = get_device_id(mem_id) + group_id = get_group_id(mem_id) + + self.flags |= (self.flags & ~self.ROM_MEM_DEVICE_ID_MASK) | ( + (device_id << self.ROM_MEM_DEVICE_ID_SHIFT) & self.ROM_MEM_DEVICE_ID_MASK + ) + + self.flags |= (self.flags & ~self.ROM_MEM_GROUP_ID_MASK) | ( + (group_id << self.ROM_MEM_GROUP_ID_SHIFT) & self.ROM_MEM_GROUP_ID_MASK + ) + + def __str__(self) -> str: + return ( + f"MEM-ENABLE: Address=0x{self.address:08X}, Size={self.size}, " + f"Flags=0x{self.flags:08X}, MemId=0x{self.mem_id:08X}" + ) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. 
+ + :param data: Input data as bytes + :return: Command Memory Enable object + :raises SPSDKError: If incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.MEM_ENABLE: + raise SPSDKError("Invalid header tag") + device_id = ( + header.flags & cls.ROM_MEM_DEVICE_ID_MASK + ) >> cls.ROM_MEM_DEVICE_ID_SHIFT + group_id = ( + header.flags & cls.ROM_MEM_GROUP_ID_MASK + ) >> cls.ROM_MEM_GROUP_ID_SHIFT + mem_id = get_memory_id(device_id, group_id) + return cls(header.address, header.count, mem_id) + + +class CmdProg(CmdBaseClass): + """Command Program class.""" + + @property + def address(self) -> int: + """Return address in target processor to program data.""" + return self._header.address + + @address.setter + def address(self, value: int) -> None: + """Setter. + + :param value: address in target processor to load data + :raises SPSDKError: When there is incorrect address + """ + if value < 0x00000000 or value > 0xFFFFFFFF: + raise SPSDKError("Incorrect address") + self._header.address = value + + @property + def flags(self) -> int: + """Return command's flag.""" + return self._header.flags + + @flags.setter + def flags(self, value: int) -> None: + """Set command's flag.""" + self._header.flags = self.is_eight_byte + self._header.flags |= value + + @property + def data_word1(self) -> int: + """Return data word 1.""" + return self._header.count + + @data_word1.setter + def data_word1(self, value: int) -> None: + """Setter. + + :param value: first data word + :raises SPSDKError: When there is incorrect value + """ + if value < 0x00000000 or value > 0xFFFFFFFF: + raise SPSDKError("Incorrect data word 1") + self._header.count = value + + @property + def data_word2(self) -> int: + """Return data word 2.""" + return self._header.data + + @data_word2.setter + def data_word2(self, value: int) -> None: + """Setter. + + :param value: second data word + :raises SPSDKError: When there is incorrect value + """ + if value < 0x00000000 or value > 0xFFFFFFFF: + raise SPSDKError("Incorrect data word 2") + self._header.data = value + + def __init__( + self, + address: int, + mem_id: int, + data_word1: int, + data_word2: int = 0, + flags: int = 0, + ) -> None: + """Initialize CMD Prog.""" + super().__init__(EnumCmdTag.PROG) + + if data_word2: + self.is_eight_byte = 1 + else: + self.is_eight_byte = 0 + + if mem_id < 0 or mem_id > 0xFF: + raise SPSDKError("Invalid ID of memory") + + self.address = address + self.data_word1 = data_word1 + self.data_word2 = data_word2 + self.mem_id = mem_id + self.flags = flags + + self.flags = (self.flags & ~self.ROM_MEM_DEVICE_ID_MASK) | ( + (self.mem_id << self.ROM_MEM_DEVICE_ID_SHIFT) & self.ROM_MEM_DEVICE_ID_MASK + ) + + def __str__(self) -> str: + return ( + f"PROG: Index=0x{self.address:08X}, DataWord1=0x{self.data_word1:08X}, " + f"DataWord2=0x{self.data_word2:08X}, Flags=0x{self.flags:08X}, MemId=0x{self.mem_id:08X}" + ) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. 
+ + :param data: Input data as bytes + :return: parsed command object + :raises SPSDKError: If incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.PROG: + raise SPSDKError("Invalid header tag") + mem_id = ( + header.flags & cls.ROM_MEM_DEVICE_ID_MASK + ) >> cls.ROM_MEM_DEVICE_ID_SHIFT + return cls(header.address, mem_id, header.count, header.data, header.flags) + + +class VersionCheckType(SpsdkEnum): + """Select type of the version check: either secure or non-secure firmware to be checked.""" + + SECURE_VERSION = (0, "SECURE_VERSION") + NON_SECURE_VERSION = (1, "NON_SECURE_VERSION") + + +class CmdVersionCheck(CmdBaseClass): + """FW Version Check command class. + + Validates version of secure or non-secure firmware. + The command fails if version is < expected. + """ + + def __init__(self, ver_type: VersionCheckType, version: int) -> None: + """Initialize CmdVersionCheck. + + :param ver_type: version check type, see `VersionCheckType` enum + :param version: to be checked + :raises SPSDKError: If invalid version check type + """ + super().__init__(EnumCmdTag.FW_VERSION_CHECK) + if ver_type not in VersionCheckType: + raise SPSDKError("Invalid version check type") + self.header.address = ver_type.tag + self.header.count = version + + @property + def type(self) -> VersionCheckType: + """Return type of the check version, see VersionCheckType enumeration.""" + return VersionCheckType.from_tag(self.header.address) + + @property + def version(self) -> int: + """Return minimal version expected.""" + return self.header.count + + def __str__(self) -> str: + return ( + f"CVER: Type={self.type.label}, Version={str(self.version)}, " + f"Flags=0x{self.header.flags:08X}" + ) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. + + :param data: Input data as bytes + :return: parsed command object + :raises SPSDKError: If incorrect header tag + """ + header = CmdHeader.parse(data) + if header.tag != EnumCmdTag.FW_VERSION_CHECK: + raise SPSDKError("Invalid header tag") + ver_type = VersionCheckType.from_tag(header.address) + version = header.count + return cls(ver_type, version) + + +class CmdKeyStoreBackupRestore(CmdBaseClass): + """Shared, abstract implementation for key-store backup and restore command.""" + + # bit mask for controller ID inside flags + ROM_MEM_DEVICE_ID_MASK = 0xFF00 + # shift for controller ID inside flags + ROM_MEM_DEVICE_ID_SHIFT = 8 + + @classmethod + @abstractmethod + def cmd_id(cls) -> EnumCmdTag: + """Return command ID. + + :raises NotImplementedError: Derived class has to implement this method + """ + raise NotImplementedError("Derived class has to implement this method.") + + def __init__(self, address: int, controller_id: ExtMemId): + """Initialize CmdKeyStoreBackupRestore. 
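+
+        The controller ID is encoded into bits 8-15 of the header flags; the
+        header count field is fixed to 4 purely for compatibility with elftosb.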
+ + :param address: where to backup key-store or source for restoring key-store + :param controller_id: ID of the memory to backup key-store or source memory to load key-store back + :raises SPSDKError: If invalid address + :raises SPSDKError: If invalid id of memory + """ + super().__init__(self.cmd_id()) + if address < 0 or address > 0xFFFFFFFF: + raise SPSDKError("Invalid address") + self.header.address = address + if controller_id.tag < 0 or controller_id.tag > 0xFF: + raise SPSDKError("Invalid ID of memory") + self.header.flags = (self.header.flags & ~self.ROM_MEM_DEVICE_ID_MASK) | ( + (controller_id.tag << self.ROM_MEM_DEVICE_ID_SHIFT) + & self.ROM_MEM_DEVICE_ID_MASK + ) + self.header.count = ( + 4 # this is useless, but it is kept for backward compatibility with elftosb + ) + + @property + def address(self) -> int: + """Return address where to backup key-store or source for restoring key-store.""" + return self.header.address + + @property + def controller_id(self) -> int: + """Return controller ID of the memory to backup key-store or source memory to load key-store back.""" + return ( + self.header.flags & self.ROM_MEM_DEVICE_ID_MASK + ) >> self.ROM_MEM_DEVICE_ID_SHIFT + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse command from bytes. + + :param data: Input data as bytes + :return: CmdKeyStoreBackupRestore object + :raises SPSDKError: When there is invalid header tag + """ + header = CmdHeader.parse(data) + if header.tag != cls.cmd_id(): + raise SPSDKError("Invalid header tag") + address = header.address + controller_id = ( + header.flags & cls.ROM_MEM_DEVICE_ID_MASK + ) >> cls.ROM_MEM_DEVICE_ID_SHIFT + return cls(address, ExtMemId.from_tag(controller_id)) + + +class CmdKeyStoreBackup(CmdKeyStoreBackupRestore): + """Command to backup keystore from non-volatile memory.""" + + @classmethod + def cmd_id(cls) -> EnumCmdTag: + """Return command ID for backup operation.""" + return EnumCmdTag.WR_KEYSTORE_FROM_NV + + +class CmdKeyStoreRestore(CmdKeyStoreBackupRestore): + """Command to restore keystore into non-volatile memory.""" + + @classmethod + def cmd_id(cls) -> EnumCmdTag: + """Return command ID for restore operation.""" + return EnumCmdTag.WR_KEYSTORE_TO_NV + + +######################################################################################################################## +# Command parser from binary format +######################################################################################################################## +_CMD_CLASS: Mapping[EnumCmdTag, Type[CmdBaseClass]] = { + EnumCmdTag.NOP: CmdNop, + EnumCmdTag.TAG: CmdTag, + EnumCmdTag.LOAD: CmdLoad, + EnumCmdTag.FILL: CmdFill, + EnumCmdTag.JUMP: CmdJump, + EnumCmdTag.CALL: CmdCall, + EnumCmdTag.ERASE: CmdErase, + EnumCmdTag.RESET: CmdReset, + EnumCmdTag.MEM_ENABLE: CmdMemEnable, + EnumCmdTag.PROG: CmdProg, + EnumCmdTag.FW_VERSION_CHECK: CmdVersionCheck, + EnumCmdTag.WR_KEYSTORE_TO_NV: CmdKeyStoreRestore, + EnumCmdTag.WR_KEYSTORE_FROM_NV: CmdKeyStoreBackup, +} + + +def parse_command(data: bytes) -> CmdBaseClass: + """Parse SB 2.x command from bytes. + + :param data: Input data as bytes + :return: parsed command object + :raises SPSDKError: Raised when there is unsupported command provided + """ + header_tag = data[1] + for cmd_tag, cmd in _CMD_CLASS.items(): + if cmd_tag.tag == header_tag: + return cmd.parse(data) + raise SPSDKError(f"Unsupported command: {str(header_tag)}") + + +def get_device_id(mem_id: int) -> int: + """Get device ID from memory ID. 
+ + :param mem_id: memory ID + :return: device ID + """ + return ((mem_id) & DEVICE_ID_MASK) >> DEVICE_ID_SHIFT + + + def get_group_id(mem_id: int) -> int: + """Get group ID from memory ID. + + :param mem_id: memory ID + :return: group ID + """ + return ((mem_id) & GROUP_ID_MASK) >> GROUP_ID_SHIFT + + + def get_memory_id(device_id: int, group_id: int) -> int: + """Get memory ID from device ID and group ID. + + :param device_id: device ID + :param group_id: group ID + :return: memory ID + """ + return (((group_id) << GROUP_ID_SHIFT) & GROUP_ID_MASK) | ( + ((device_id) << DEVICE_ID_SHIFT) & DEVICE_ID_MASK + ) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/headers.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/headers.py new file mode 100644 index 00000000..ad262b90 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/headers.py @@ -0,0 +1,243 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Image header.""" + +from datetime import datetime +from struct import calcsize, pack, unpack_from +from typing import Optional + +from typing_extensions import Self + +from ...crypto.rng import random_bytes +from ...exceptions import SPSDKError +from ...sbfile.misc import BcdVersion3, pack_timestamp, unpack_timestamp +from ...utils.abstract import BaseClass +from ...utils.misc import swap16 + + +######################################################################################################################## +# Image Header Class (Version SB2) +######################################################################################################################## +# pylint: disable=too-many-instance-attributes +class ImageHeaderV2(BaseClass): + """Image Header V2 class.""" + + FORMAT = "<16s4s4s2BH4I4H4sQ12HI4s" + SIZE = calcsize(FORMAT) + SIGNATURE1 = b"STMP" + SIGNATURE2 = b"sgtl" + + def __init__( + self, + version: str = "2.0", + product_version: str = "1.0.0", + component_version: str = "1.0.0", + build_number: int = 0, + flags: int = 0x08, + nonce: Optional[bytes] = None, + timestamp: Optional[datetime] = None, + ) -> None: + """Initialize Image Header Version 2.x. + + :param version: The image version value (default: 2.0) + :param product_version: The product version (default: 1.0.0) + :param component_version: The component version (default: 1.0.0) + :param build_number: The build number value (default: 0) + :param flags: The flags value (default: 0x08) + :param nonce: The NONCE value; None if not yet assigned (a 16-byte nonce must be set before export)
+ :param timestamp: value requested in the test; None to use current value + """ + self.nonce = nonce + self.version = version + self.flags = flags + self.image_blocks = 0 # will be updated from boot image + self.first_boot_tag_block = 0 + self.first_boot_section_id = 0 + self.offset_to_certificate_block = 0 # will be updated from boot image + self.header_blocks = 0 # will be calculated in the BootImage later + self.key_blob_block = 8 + self.key_blob_block_count = 5 + self.max_section_mac_count = 0 # will be calculated in the BootImage later + self.timestamp = ( + timestamp + if timestamp is not None + else datetime.fromtimestamp(int(datetime.now().timestamp())) + ) + self.product_version: BcdVersion3 = BcdVersion3.to_version(product_version) + self.component_version: BcdVersion3 = BcdVersion3.to_version(component_version) + self.build_number = build_number + + def __repr__(self) -> str: + return f"Header: v{self.version}, {self.image_blocks}" + + def flags_desc(self) -> str: + """Return flag description.""" + return "Signed" if self.flags == 0x8 else "Unsigned" + + def __str__(self) -> str: + """Get info of Header as string.""" + nfo = str() + nfo += f" Version: {self.version}\n" + if self.nonce is not None: + nfo += f" Digest: {self.nonce.hex().upper()}\n" + nfo += f" Flag: 0x{self.flags:X} ({self.flags_desc()})\n" + nfo += f" Image Blocks: {self.image_blocks}\n" + nfo += f" First Boot Tag Block: {self.first_boot_tag_block}\n" + nfo += f" First Boot SectionID: {self.first_boot_section_id}\n" + nfo += f" Offset to Cert Block: {self.offset_to_certificate_block}\n" + nfo += f" Key Blob Block: {self.key_blob_block}\n" + nfo += f" Header Blocks: {self.header_blocks}\n" + nfo += f" Sections MAC Count: {self.max_section_mac_count}\n" + nfo += f" Key Blob Block Count: {self.key_blob_block_count}\n" + nfo += ( + f" Timestamp: {self.timestamp.strftime('%H:%M:%S (%d.%m.%Y)')}\n" + ) + nfo += f" Product Version: {self.product_version}\n" + nfo += f" Component Version: {self.component_version}\n" + nfo += f" Build Number: {self.build_number}\n" + return nfo + + def export(self, padding: Optional[bytes] = None) -> bytes: + """Serialize object into bytes. 
+ + :param padding: header padding 8 bytes (for testing purposes); None to use random value + :return: binary representation + :raises SPSDKError: Raised when format is incorrect + :raises SPSDKError: Raised when length of padding is incorrect + :raises SPSDKError: Raised when length of header is incorrect + """ + if not isinstance(self.nonce, bytes) or len(self.nonce) != 16: + raise SPSDKError("Format is incorrect") + major_version, minor_version = [int(v) for v in self.version.split(".")] + product_version_words = [swap16(v) for v in self.product_version.nums] + component_version_words = [swap16(v) for v in self.component_version.nums] + if padding is None: + padding = random_bytes(8) + else: + if len(padding) != 8: + raise SPSDKError("Invalid length of padding") + + result = pack( + self.FORMAT, + self.nonce, + # padding 8 bytes + padding, + self.SIGNATURE1, + # header version + major_version, + minor_version, + self.flags, + self.image_blocks, + self.first_boot_tag_block, + self.first_boot_section_id, + self.offset_to_certificate_block, + self.header_blocks, + self.key_blob_block, + self.key_blob_block_count, + self.max_section_mac_count, + self.SIGNATURE2, + pack_timestamp(self.timestamp), + # product version + product_version_words[0], + 0, + product_version_words[1], + 0, + product_version_words[2], + 0, + # component version + component_version_words[0], + 0, + component_version_words[1], + 0, + component_version_words[2], + 0, + self.build_number, + # padding[4] + padding[4:], + ) + if len(result) != self.SIZE: + raise SPSDKError("Invalid length of header") + return result + + # pylint: disable=too-many-locals + @classmethod + def parse(cls, data: bytes) -> Self: + """Deserialization from binary form. + + :param data: binary representation + :return: parsed instance of the header + :raises SPSDKError: Unable to parse data + """ + if cls.SIZE > len(data): + raise SPSDKError("Insufficient amount of data") + ( + nonce, + # padding0 + _, + signature1, + # header version + major_version, + minor_version, + flags, + image_blocks, + first_boot_tag_block, + first_boot_section_id, + offset_to_certificate_block, + header_blocks, + key_blob_block, + key_blob_block_count, + max_section_mac_count, + signature2, + raw_timestamp, + # product version + pv0, + _, + pv1, + _, + pv2, + _, + # component version + cv0, + _, + cv1, + _, + cv2, + _, + build_number, + # padding1 + _, + ) = unpack_from(cls.FORMAT, data) + + # check header signature 1 + if signature1 != cls.SIGNATURE1: + raise SPSDKError("SIGNATURE #1 doesn't match") + + # check header signature 2 + if signature2 != cls.SIGNATURE2: + raise SPSDKError("SIGNATURE #2 doesn't match") + + obj = cls( + version=f"{major_version}.{minor_version}", + flags=flags, + product_version=f"{swap16(pv0):X}.{swap16(pv1):X}.{swap16(pv2):X}", + component_version=f"{swap16(cv0):X}.{swap16(cv1):X}.{swap16(cv2):X}", + build_number=build_number, + ) + + obj.nonce = nonce + obj.image_blocks = image_blocks + obj.first_boot_tag_block = first_boot_tag_block + obj.first_boot_section_id = first_boot_section_id + obj.offset_to_certificate_block = offset_to_certificate_block + obj.header_blocks = header_blocks + obj.key_blob_block = key_blob_block + obj.key_blob_block_count = key_blob_block_count + obj.max_section_mac_count = max_section_mac_count + obj.timestamp = unpack_timestamp(raw_timestamp) + + return obj diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/images.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/images.py new file mode 100644
index 00000000..fc3e7abf --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/images.py @@ -0,0 +1,705 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Boot Image V2.0, V2.1.""" + +import logging +import os +from datetime import datetime +from typing import Any, Dict, Iterator, List, Optional + +from typing_extensions import Self + +from ...crypto.certificate import Certificate +from ...crypto.hash import EnumHashAlgorithm, get_hash +from ...crypto.hmac import hmac +from ...crypto.rng import random_bytes +from ...crypto.signature_provider import SignatureProvider, get_signature_provider +from ...crypto.symmetric import Counter, aes_key_unwrap, aes_key_wrap +from ...exceptions import SPSDKError +from ...sbfile.misc import SecBootBlckSize +from ...sbfile.sb2.sb_21_helper import SB21Helper +from ...utils.abstract import BaseClass +from ...utils.crypto.cert_blocks import CertBlockV1 +from ...utils.database import DatabaseManager, get_db, get_families, get_schema_file +from ...utils.misc import ( + find_first, + load_configuration, + load_hex_string, + load_text, + value_to_int, + write_file, +) +from ...utils.schema_validator import CommentedConfig, check_config +from . import sly_bd_parser as bd_parser +from .commands import CmdHeader +from .headers import ImageHeaderV2 +from .sections import BootSectionV2, CertSectionV2 + +logger = logging.getLogger(__name__) + + +class SBV2xAdvancedParams: + """The class holds advanced parameters for the SB file encryption. + + These parameters are used for the tests; for production, you can use the default values (random keys + current time) + """ + + @staticmethod + def _create_nonce() -> bytes: + """Return random nonce.""" + nonce = bytearray(random_bytes(16)) + # clear nonce bit at offsets 31 and 63 + nonce[9] &= 0x7F + nonce[13] &= 0x7F + return bytes(nonce) + + def __init__( + self, + dek: Optional[bytes] = None, + mac: Optional[bytes] = None, + nonce: Optional[bytes] = None, + timestamp: Optional[datetime] = None, + ): + """Initialize SBV2xAdvancedParams.
+ + :param dek: DEK key + :param mac: MAC key + :param nonce: nonce + :param timestamp: fixed timestamp for the header; use None to use current date/time + :raises SPSDKError: Invalid dek or mac + :raises SPSDKError: Invalid length of nonce + """ + self._dek: bytes = dek if dek else random_bytes(32) + self._mac: bytes = mac if mac else random_bytes(32) + self._nonce: bytes = nonce if nonce else SBV2xAdvancedParams._create_nonce() + if timestamp is None: + timestamp = datetime.now() + self._timestamp = datetime.fromtimestamp(int(timestamp.timestamp())) + if len(self._dek) != 32 and len(self._mac) != 32: + raise SPSDKError("Invalid dek or mac") + if len(self._nonce) != 16: + raise SPSDKError("Invalid length of nonce") + + @property + def dek(self) -> bytes: + """Return DEK key.""" + return self._dek + + @property + def mac(self) -> bytes: + """Return MAC key.""" + return self._mac + + @property + def nonce(self) -> bytes: + """Return NONCE.""" + return self._nonce + + @property + def timestamp(self) -> datetime: + """Return timestamp.""" + return self._timestamp + + +######################################################################################################################## +# Secure Boot Image Class (Version 2.1) +######################################################################################################################## +class BootImageV21(BaseClass): + """Boot Image V2.1 class.""" + + # Image specific data + HEADER_MAC_SIZE = 32 + KEY_BLOB_SIZE = 80 + SHA_256_SIZE = 32 + + # defines + FLAGS_SHA_PRESENT_BIT = 0x8000 # image contains SHA-256 + FLAGS_ENCRYPTED_SIGNED_BIT = 0x0008 # image is signed and encrypted + + def __init__( + self, + kek: bytes, + *sections: BootSectionV2, + product_version: str = "1.0.0", + component_version: str = "1.0.0", + build_number: int = 0, + advanced_params: SBV2xAdvancedParams = SBV2xAdvancedParams(), + flags: int = FLAGS_SHA_PRESENT_BIT | FLAGS_ENCRYPTED_SIGNED_BIT, + ) -> None: + """Initialize Secure Boot Image V2.1. + + :param kek: key to wrap DEC and MAC keys + + :param product_version: The product version (default: 1.0.0) + :param component_version: The component version (default: 1.0.0) + :param build_number: The build number value (default: 0) + + :param advanced_params: optional advanced parameters for encryption; it is recommended to use default value + :param flags: see flags defined in class. + :param sections: Boot sections + """ + self._kek = kek + self.signature_provider: Optional[ + SignatureProvider + ] = None # this should be assigned for export, not needed for parsing + self._dek = advanced_params.dek + self._mac = advanced_params.mac + self._header = ImageHeaderV2( + version="2.1", + product_version=product_version, + component_version=component_version, + build_number=build_number, + flags=flags, + nonce=advanced_params.nonce, + timestamp=advanced_params.timestamp, + ) + self._cert_block: Optional[CertBlockV1] = None + self.boot_sections: List[BootSectionV2] = [] + # ... 
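+ # Sections passed to the constructor are appended below via add_boot_section(), which also checks that each entry is a BootSectionV2 instance.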
+ for section in sections: + self.add_boot_section(section) + + @property + def header(self) -> ImageHeaderV2: + """Return image header.""" + return self._header + + @property + def dek(self) -> bytes: + """Data encryption key.""" + return self._dek + + @property + def mac(self) -> bytes: + """Message authentication code.""" + return self._mac + + @property + def kek(self) -> bytes: + """Return key to wrap DEC and MAC keys.""" + return self._kek + + @property + def cert_block(self) -> Optional[CertBlockV1]: + """Return certificate block; None if SB file not signed or block not assigned yet.""" + return self._cert_block + + @cert_block.setter + def cert_block(self, value: CertBlockV1) -> None: + """Setter. + + :param value: block to be assigned; None to remove previously assigned block + """ + assert isinstance(value, CertBlockV1) + self._cert_block = value + self._cert_block.alignment = 16 + + @property + def signed(self) -> bool: + """Return flag whether SB file is signed.""" + return True # SB2.1 is always signed + + @property + def cert_header_size(self) -> int: + """Return image raw size (not aligned) for certificate header.""" + size = ImageHeaderV2.SIZE + self.HEADER_MAC_SIZE + size += self.KEY_BLOB_SIZE + # Certificates Section + cert_blk = self.cert_block + if cert_blk: + size += cert_blk.raw_size + return size + + @property + def raw_size(self) -> int: + """Return image raw size (not aligned).""" + # Header, HMAC and KeyBlob + size = ImageHeaderV2.SIZE + self.HEADER_MAC_SIZE + size += self.KEY_BLOB_SIZE + # Certificates Section + cert_blk = self.cert_block + if cert_blk: + size += cert_blk.raw_size + if not self.signed: # pragma: no cover # SB2.1 is always signed + raise SPSDKError("Certificate block is not signed") + size += cert_blk.signature_size + # Boot Sections + for boot_section in self.boot_sections: + size += boot_section.raw_size + return size + + def __len__(self) -> int: + return len(self.boot_sections) + + def __getitem__(self, key: int) -> BootSectionV2: + return self.boot_sections[key] + + def __setitem__(self, key: int, value: BootSectionV2) -> None: + self.boot_sections[key] = value + + def __iter__(self) -> Iterator[BootSectionV2]: + return self.boot_sections.__iter__() + + def update(self) -> None: + """Update BootImageV21.""" + if self.boot_sections: + self._header.first_boot_section_id = self.boot_sections[0].uid + # calculate first boot tag block + data_size = self._header.SIZE + self.HEADER_MAC_SIZE + self.KEY_BLOB_SIZE + cert_blk = self.cert_block + if cert_blk is not None: + data_size += cert_blk.raw_size + if not self.signed: # pragma: no cover # SB2.1 is always signed + raise SPSDKError("Certificate block is not signed") + data_size += cert_blk.signature_size + self._header.first_boot_tag_block = SecBootBlckSize.to_num_blocks(data_size) + # ... 
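+ # Recompute the total image size and header size in cipher blocks, plus the byte offset from the start of the image to the certificate block.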
+ self._header.image_blocks = SecBootBlckSize.to_num_blocks(self.raw_size) + self._header.header_blocks = SecBootBlckSize.to_num_blocks(self._header.SIZE) + self._header.offset_to_certificate_block = ( + self._header.SIZE + self.HEADER_MAC_SIZE + self.KEY_BLOB_SIZE + ) + # Get HMAC count + self._header.max_section_mac_count = 0 + for boot_sect in self.boot_sections: + boot_sect.is_last = True # unified with elftosb + self._header.max_section_mac_count += boot_sect.hmac_count + # Update certificates block header + cert_clk = self.cert_block + if cert_clk is not None: + cert_clk.header.build_number = self._header.build_number + cert_clk.header.image_length = self.cert_header_size + + def __repr__(self) -> str: + return f"SB2.1, {'Signed' if self.signed else 'Plain'} " + + def __str__(self) -> str: + """Return text description of the instance.""" + self.update() + nfo = "\n" + nfo += ":::::::::::::::::::::::::::::::::: IMAGE HEADER ::::::::::::::::::::::::::::::::::::::\n" + nfo += str(self._header) + if self.cert_block is not None: + nfo += "::::::::::::::::::::::::::::::: CERTIFICATES BLOCK ::::::::::::::::::::::::::::::::::::\n" + nfo += str(self.cert_block) + nfo += "::::::::::::::::::::::::::::::::::: BOOT SECTIONS ::::::::::::::::::::::::::::::::::::\n" + for index, section in enumerate(self.boot_sections): + nfo += f"[ SECTION: {index} | UID: 0x{section.uid:08X} ]\n" + nfo += str(section) + return nfo + + def add_boot_section(self, section: BootSectionV2) -> None: + """Add new Boot section into image. + + :param section: Boot section to be added + :raises SPSDKError: Raised when section is not instance of BootSectionV2 class + """ + if not isinstance(section, BootSectionV2): + raise SPSDKError("Section is not instance of BootSectionV2 class") + self.boot_sections.append(section) + + # pylint: disable=too-many-locals + def export(self, padding: Optional[bytes] = None) -> bytes: + """Serialize image object. 
+ + :param padding: header padding (8 bytes) for testing purpose; None to use random values (recommended) + :return: exported bytes + :raises SPSDKError: Raised when there is no boot section to be added + :raises SPSDKError: Raised when certificate is not assigned + :raises SPSDKError: Raised when private key is not assigned + :raises SPSDKError: Raised when private header's nonce is invalid + :raises SPSDKError: Raised when private key does not match certificate + :raises SPSDKError: Raised when there is no debug info + """ + # validate params + if not self.boot_sections: + raise SPSDKError("At least one Boot Section must be added") + if self.cert_block is None: + raise SPSDKError("Certificate is not assigned") + if self.signature_provider is None: + raise SPSDKError( + "Signature provider is not assigned, cannot sign the image" + ) + # Update internals + self.update() + # Export Boot Sections + bs_data = bytes() + bs_offset = ( + ImageHeaderV2.SIZE + + self.HEADER_MAC_SIZE + + self.KEY_BLOB_SIZE + + self.cert_block.raw_size + + self.cert_block.signature_size + ) + if self.header.flags & self.FLAGS_SHA_PRESENT_BIT: + bs_offset += self.SHA_256_SIZE + + if not self._header.nonce: + raise SPSDKError("Invalid header's nonce") + counter = Counter(self._header.nonce, SecBootBlckSize.to_num_blocks(bs_offset)) + for sect in self.boot_sections: + bs_data += sect.export(dek=self.dek, mac=self.mac, counter=counter) + # Export Header + signed_data = self._header.export(padding=padding) + # Add HMAC data + first_bs_hmac_count = self.boot_sections[0].hmac_count + hmac_data = bs_data[ + CmdHeader.SIZE : CmdHeader.SIZE + (first_bs_hmac_count * 32) + 32 + ] + hmac_bytes = hmac(self.mac, hmac_data) + signed_data += hmac_bytes + # Add KeyBlob data + key_blob = aes_key_wrap(self.kek, self.dek + self.mac) + key_blob += b"\00" * (self.KEY_BLOB_SIZE - len(key_blob)) + signed_data += key_blob + # Add Certificates data + signed_data += self.cert_block.export() + # Add SHA-256 of Bootable sections if requested + if self.header.flags & self.FLAGS_SHA_PRESENT_BIT: + signed_data += get_hash(bs_data) + # Add Signature data + signature = self.signature_provider.get_signature(signed_data) + + return signed_data + signature + bs_data + + # pylint: disable=too-many-locals + @classmethod + def parse( + cls, + data: bytes, + offset: int = 0, + kek: bytes = bytes(), + plain_sections: bool = False, + ) -> "BootImageV21": + """Parse image from bytes. 
+ + :param data: Raw data of parsed image + :param offset: The offset of input data + :param kek: The Key for unwrapping DEK and MAC keys (required) + :param plain_sections: Sections are not encrypted; this is used only for debugging, + not supported by ROM code + :return: BootImageV21 parsed object + :raises SPSDKError: raised when header is in incorrect format + :raises SPSDKError: raised when signature is incorrect + :raises SPSDKError: Raised when kek is empty + :raises SPSDKError: raised when header's nonce not present" + """ + if not kek: + raise SPSDKError("kek cannot be empty") + index = offset + header_raw_data = data[index : index + ImageHeaderV2.SIZE] + index += ImageHeaderV2.SIZE + # Not used right now: hmac_data = data[index: index + cls.HEADER_MAC_SIZE] + index += cls.HEADER_MAC_SIZE + key_blob = data[index : index + cls.KEY_BLOB_SIZE] + index += cls.KEY_BLOB_SIZE + key_blob_unwrap = aes_key_unwrap(kek, key_blob[:-8]) + dek = key_blob_unwrap[:32] + mac = key_blob_unwrap[32:] + # Parse Header + header = ImageHeaderV2.parse(header_raw_data) + if header.offset_to_certificate_block != (index - offset): + raise SPSDKError("Invalid offset") + # Parse Certificate Block + cert_block = CertBlockV1.parse(data[index:]) + index += cert_block.raw_size + + # Verify Signature + signature_index = index + # The image may contain SHA, in such a case the signature is placed + # after SHA. Thus we must shift the index by SHA size. + if header.flags & BootImageV21.FLAGS_SHA_PRESENT_BIT: + signature_index += BootImageV21.SHA_256_SIZE + result = cert_block.verify_data( + data[signature_index : signature_index + cert_block.signature_size], + data[offset:signature_index], + ) + + if not result: + raise SPSDKError("Verification failed") + # Check flags, if 0x8000 bit is set, the SB file contains SHA-256 between + # certificate and signature. + if header.flags & BootImageV21.FLAGS_SHA_PRESENT_BIT: + bootable_section_sha256 = data[index : index + BootImageV21.SHA_256_SIZE] + index += BootImageV21.SHA_256_SIZE + index += cert_block.signature_size + # Check first Boot Section HMAC + # Not implemented yet + # hmac_data_calc = hmac(mac, data[index + CmdHeader.SIZE: index + CmdHeader.SIZE + ((2) * 32)]) + # if hmac_data != hmac_data_calc: + # raise SPSDKError("HMAC failed") + if not header.nonce: + raise SPSDKError("Header's nonce not present") + counter = Counter(header.nonce) + counter.increment(SecBootBlckSize.to_num_blocks(index - offset)) + boot_section = BootSectionV2.parse( + data, index, dek=dek, mac=mac, counter=counter, plain_sect=plain_sections + ) + if header.flags & BootImageV21.FLAGS_SHA_PRESENT_BIT: + computed_bootable_section_sha256 = get_hash( + data[index:], algorithm=EnumHashAlgorithm.SHA256 + ) + + if bootable_section_sha256 != computed_bootable_section_sha256: + raise SPSDKError( + desc=( + "Error: invalid Bootable section SHA." + f"Expected {bootable_section_sha256.decode('utf-8')}," + f"got {computed_bootable_section_sha256.decode('utf-8')}" + ) + ) + adv_params = SBV2xAdvancedParams( + dek=dek, mac=mac, nonce=header.nonce, timestamp=header.timestamp + ) + obj = cls( + kek=kek, + product_version=str(header.product_version), + component_version=str(header.component_version), + build_number=header.build_number, + advanced_params=adv_params, + ) + obj.cert_block = cert_block + obj.add_boot_section(boot_section) + return obj + + @staticmethod + def get_supported_families() -> List[str]: + """Return list of supported families. + + :return: List of supported families. 
+ """ + return get_families(DatabaseManager.SB21) + + @classmethod + def get_commands_validation_schemas( + cls, family: Optional[str] = None + ) -> List[Dict[str, Any]]: + """Create the list of validation schemas. + + :param family: Device family filter, if None all commands are returned. + :return: List of validation schemas. + """ + sb2_sch_cfg = get_schema_file(DatabaseManager.SB21) + + schemas: List[Dict[str, Any]] = [sb2_sch_cfg["sb2_sections"]] + if family: + db = get_db(family, "latest") + # remove unused command for current family + supported_commands = db.get_list(DatabaseManager.SB21, "supported_commands") + list_of_commands: List[Dict] = schemas[0]["properties"]["sections"][ + "items" + ]["properties"]["commands"]["items"]["oneOf"] + + schemas[0]["properties"]["sections"]["items"]["properties"]["commands"][ + "items" + ]["oneOf"] = [ + command + for command in list_of_commands + if list(command["properties"].keys())[0] in supported_commands + ] + + return schemas + + @classmethod + def get_validation_schemas( + cls, family: Optional[str] = None + ) -> List[Dict[str, Any]]: + """Create the list of validation schemas. + + :param family: Device family + :return: List of validation schemas. + """ + sb2_schema = get_schema_file(DatabaseManager.SB21) + mbi_schema = get_schema_file(DatabaseManager.MBI) + + schemas: List[Dict[str, Any]] = [] + schemas.extend([mbi_schema[x] for x in ["signature_provider", "cert_block_v1"]]) + schemas.extend( + [sb2_schema[x] for x in ["sb2_output", "sb2_family", "common", "sb2"]] + ) + + add_keyblob = True + + if family: + add_keyblob = get_db(family, "latest").get_bool( + DatabaseManager.SB21, "keyblobs", default=True + ) + + if add_keyblob: + schemas.append(sb2_schema["keyblobs"]) + schemas.extend(cls.get_commands_validation_schemas(family)) + + # find family + for schema in schemas: + if "properties" in schema and "family" in schema["properties"]: + if family: + schema["properties"]["family"]["template_value"] = family + schema["properties"]["family"]["enum"] = cls.get_supported_families() + if family: + schema["properties"]["family"]["template_value"] = family + break + + return schemas + + @classmethod + def generate_config_template(cls, family: Optional[str]) -> str: + """Generate configuration template. + + :param family: Device family. + :return: Dictionary of individual templates (key is name of template, value is template itself). + """ + title = "Secure Binary v2.1 Configuration template" + if family in cls.get_supported_families(): + title += f" for {family}" + return CommentedConfig( + title, + cls.get_validation_schemas(family), + ).get_template() + + @classmethod + def parse_sb21_config( + cls, + config_path: str, + external_files: Optional[List[str]] = None, + ) -> Dict[Any, Any]: + """Create lexer and parser, load the BD file content and parse it. + + :param config_path: Path to configuration file either BD or YAML formatted. + :param external_files: Optional list of external files for BD processing + :return: Dictionary with parsed configuration. 
+ """ + try: + bd_file_content = load_text(config_path) + parser = bd_parser.BDParser() + parsed_conf = parser.parse(text=bd_file_content, extern=external_files) + if parsed_conf is None: + raise SPSDKError( + "Invalid bd file, secure binary file generation terminated" + ) + except SPSDKError: + parsed_conf = load_configuration(config_path) + config_dir = os.path.dirname(config_path) + family = parsed_conf.get("family") + schemas = BootImageV21.get_validation_schemas(family) + check_config(parsed_conf, schemas, search_paths=[config_dir]) + + return parsed_conf + + @classmethod + def load_from_config( + cls, + config: Dict[str, Any], + key_file_path: Optional[str] = None, + signature_provider: Optional[SignatureProvider] = None, + signing_certificate_file_paths: Optional[List[str]] = None, + root_key_certificate_paths: Optional[List[str]] = None, + rkth_out_path: Optional[str] = None, + search_paths: Optional[List[str]] = None, + ) -> "BootImageV21": + """Creates an instance of BootImageV21 from configuration. + + :param config: Input standard configuration. + :param key_file_path: path to key file. + :param signature_provider: Signature provider to sign final image + :param signing_certificate_file_paths: signing certificate chain. + :param root_key_certificate_paths: paths to root key certificate(s) for + verifying other certificates. Only 4 root key certificates are allowed, + others are ignored. One of the certificates must match the first certificate + passed in signing_certificate_file_paths. + :param rkth_out_path: output path to hash of hashes of root keys. If set to + None, 'hash.bin' is created under working directory. + :param search_paths: List of paths where to search for the file, defaults to None + :return: Instance of Secure Binary V2.1 class + """ + flags = config["options"].get( + "flags", + BootImageV21.FLAGS_SHA_PRESENT_BIT + | BootImageV21.FLAGS_ENCRYPTED_SIGNED_BIT, + ) + # Flags may be a hex string + flags = value_to_int(flags) + + product_version = config["options"].get("productVersion", "1.0.0") + component_version = config["options"].get("componentVersion", "1.0.0") + + if signing_certificate_file_paths and root_key_certificate_paths: + build_number = config["options"].get("buildNumber", 1) + cert_block = CertBlockV1(build_number=build_number) + for cert_path in signing_certificate_file_paths: + cert = Certificate.load(cert_path) + cert_block.add_certificate(cert) + for cert_idx, cert_path in enumerate(root_key_certificate_paths): + cert = Certificate.load(cert_path) + cert_block.set_root_key_hash(cert_idx, cert) + else: + cert_block = CertBlockV1.from_config(config, search_paths=search_paths) + + if key_file_path: + key = key_file_path + else: + key = config["containerKeyBlobEncryptionKey"] + + sb_kek = load_hex_string(key, expected_size=32, search_paths=search_paths) + + # validate keyblobs and perform appropriate actions + keyblobs = config.get("keyblobs", []) + + sb21_helper = SB21Helper(search_paths) + sb_sections = [] + sections = config["sections"] + for section_id, section in enumerate(sections): + commands = [] + for cmd in section["commands"]: + for key, value in cmd.items(): + # we use a helper function, based on the key ('load', 'erase' + # etc.) to create a command object. The helper function knows + # how to handle the parameters of each command. 
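+ # For 'keywrap' and 'encrypt' commands the keyblob definitions from the configuration are merged into the command arguments before the command object is built.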
+ cmd_fce = sb21_helper.get_command(key) + if key in ("keywrap", "encrypt"): + keyblob = {"keyblobs": keyblobs} + value.update(keyblob) + cmd = cmd_fce(value) + commands.append(cmd) + + sb_sections.append(BootSectionV2(section_id, *commands)) + + # We have a list of sections and their respective commands, lets create + # a boot image v2.1 object + secure_binary = BootImageV21( + sb_kek, + *sb_sections, + product_version=product_version, + component_version=component_version, + build_number=cert_block.header.build_number, + flags=flags, + ) + + # We have our secure binary, now we attach to it the certificate block and + # the private key content + secure_binary.cert_block = cert_block + + if not signature_provider: + signing_key_path = config.get( + "signPrivateKey", config.get("mainCertPrivateKeyFile") + ) + signature_provider = get_signature_provider( + sp_cfg=config.get("signProvider"), + local_file_key=signing_key_path, + search_paths=search_paths, + ) + + secure_binary.signature_provider = signature_provider + + if not rkth_out_path: + rkth_out_path = config.get( + "RKTHOutputPath", os.path.join(os.getcwd(), "hash.bin") + ) + assert isinstance(rkth_out_path, str), "Hash of hashes path must be string" + write_file(secure_binary.cert_block.rkth, rkth_out_path, mode="wb") + + return secure_binary diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sb_21_helper.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sb_21_helper.py new file mode 100644 index 00000000..9c5c870b --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sb_21_helper.py @@ -0,0 +1,486 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2021-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause +"""Module containing helper functions for nxpimage.""" + +import logging +import struct +from numbers import Number +from typing import Callable, Dict, List, Optional, Union + +from ...exceptions import SPSDKError +from ...mboot.memories import ExtMemId, MemId +from ...sbfile.sb2.commands import ( + CmdBaseClass, + CmdErase, + CmdFill, + CmdJump, + CmdKeyStoreBackup, + CmdKeyStoreRestore, + CmdLoad, + CmdMemEnable, + CmdProg, + CmdVersionCheck, + VersionCheckType, +) +from ...utils.crypto.otfad import KeyBlob +from ...utils.misc import ( + align_block, + get_bytes_cnt_of_int, + load_binary, + swap32, + value_to_bytes, + value_to_int, +) + +logger = logging.getLogger(__name__) + + +class SB21Helper: + """SB21 Helper class.""" + + def __init__(self, search_paths: Optional[List[str]] = None): + """SB21 helper constructor.""" + self.search_paths = search_paths + self.cmds = { + "load": self._load, + "fill": self._fill_memory, + "erase": self._erase_cmd_handler, + "enable": self._enable, + "encrypt": self._encrypt, + "keywrap": self._keywrap, + "keystore_to_nv": self._keystore_to_nv, + "keystore_from_nv": self._keystore_from_nv, + "version_check": self._version_check, + "jump": self._jump, + "programFuses": self._prog, + } + + @staticmethod + def get_mem_id(mem_opt: Union[int, str]) -> int: + """Get memory ID from str or int in BD file. 
+ + :param mem_opt: memory option in BD file + :raises SPSDKError: if memory option is not supported + :return: int memory ID + """ + if isinstance(mem_opt, int): + return mem_opt + if isinstance(mem_opt, str): + try: + return int(mem_opt, 0) + except ValueError: + mem_id = MemId.get_legacy_str(mem_opt) + if mem_id: + return mem_id + raise SPSDKError(f"Unsupported memory option: {mem_opt}") + + def get_command(self, cmd_name: str) -> Callable[[Dict], CmdBaseClass]: + """Returns a function based on input argument name. + + The json file generated by bd file parser uses command names (load, fill, + etc.). These names are used to get the proper function name, which creates + corresponding object. + + :param cmd_name: one of 'load', 'fill', 'erase', 'enable', 'reset', 'encrypt', + 'keywrap' + :return: appropriate Command object + """ + command_object = self.cmds[cmd_name] + return command_object + + def _fill_memory(self, cmd_args: dict) -> CmdFill: + """Returns a CmdFill object initialized based on cmd_args. + + Fill is a type of load command used for filling a region of memory with pattern. + + Example: + section(0) { + // pattern fill + load 0x55.b > 0x2000..0x3000; + // load two bytes at an address + load 0x1122.h > 0xf00; + } + + :param cmd_args: dictionary holding address and pattern + :return: CmdFill object + """ + address = value_to_int(cmd_args["address"]) + pattern = value_to_int(cmd_args["pattern"]) + return CmdFill(address=address, pattern=pattern) + + def _load(self, cmd_args: dict) -> Union[CmdLoad, CmdProg]: + """Returns a CmdLoad object initialized based on cmd_args. + + The load statement is used to store data into the memory. + The load command is also used to write to the flash memory. + When loading to the flash memory, the region being loaded to must be erased before to the load operation. + The most common form of a load statement is loading a source file by name. + Only plain binary images are supported. 
+ + Example: + section (0) { + // load an entire binary file to an address + load myBinFile > 0x70000000; + // load an eight byte blob + load {{ ff 2e 90 07 77 5f 1d 20 }} > 0xa0000000; + // 4 byte load IFR statement + load ifr 0x1234567 > 0x30; + // Program fuse statement + load fuse {{00 00 00 01}} > 0x01000188; + // load to sdcard + load sdcard {{aa bb cc dd}} > 0x08000188; + load @288 {{aa bb cc dd}} > 0x08000188; + } + + :param cmd_args: dictionary holding path to file or values and address + :raises SPSDKError: If dict doesn't contain 'file' or 'values' key + :return: CmdLoad object + """ + prog_mem_id = 4 + address = value_to_int(cmd_args["address"]) + load_opt = cmd_args.get("load_opt") + mem_id = 0 + if load_opt: + mem_id = self.get_mem_id(load_opt) + + # general non-authenticated load command + if cmd_args.get("file"): + data = load_binary(cmd_args["file"], self.search_paths) + return CmdLoad(address=address, data=data, mem_id=mem_id) + if cmd_args.get("values"): + # if the memory ID is fuse or IFR change load command to program command + if mem_id == prog_mem_id: + return self._prog(cmd_args) + + values = [int(s, 16) for s in cmd_args["values"].split(",")] + if max(values) > 0xFFFFFFFF or min(values) < 0: + raise SPSDKError( + f"Invalid values for load command, values: {(values)}" + + ", expected unsigned 32bit comma separated values" + ) + data = struct.pack(f"<{len(values)}L", *values) + return CmdLoad(address=address, data=data, mem_id=mem_id) + if cmd_args.get("pattern"): + # if the memory ID is fuse or IFR change load command to program command + # pattern in this case represents 32b int data word 1 + if mem_id == prog_mem_id: + return self._prog(cmd_args) + + raise SPSDKError(f"Unsupported LOAD command args: {cmd_args}") + + def _prog(self, cmd_args: dict) -> CmdProg: + """Returns a CmdProg object initialized based on cmd_args. + + :param cmd_args: dictionary holding path to file or values and address + :raises SPSDKError: If data words are wrong + :return: CmdProg object + """ + address = value_to_int(cmd_args["address"]) + mem_id = self.get_mem_id(cmd_args.get("load_opt", 4)) + data_word1 = 0 + data_word2 = 0 + # values provided as binary blob {{aa bb cc dd}} either 4 or 8 bytes: + if cmd_args.get("values"): + int_value = int(cmd_args["values"], 16) + byte_count = get_bytes_cnt_of_int(int_value) + + if byte_count <= 4: + data_word1 = int_value + elif byte_count <= 8: + data_words = value_to_bytes(int_value, byte_cnt=8) + data_word1 = value_to_int(data_words[:4]) + data_word2 = value_to_int(data_words[4:]) + else: + raise SPSDKError("Program operation requires 4 or 8 byte segment") + + # swap byte order + data_word1 = swap32(data_word1) + data_word2 = swap32(data_word2) + + # values provided as integer e.g. 0x1000 represents data_word1 + elif cmd_args.get("pattern"): + int_value = value_to_int(cmd_args["pattern"]) + byte_count = get_bytes_cnt_of_int(int_value) + + if byte_count <= 4: + data_word1 = int_value + else: + raise SPSDKError("Data word 1 must be 4 bytes long") + else: + raise SPSDKError("Unsupported program command arguments") + + return CmdProg( + address=address, data_word1=data_word1, data_word2=data_word2, mem_id=mem_id + ) + + def _erase_cmd_handler(self, cmd_args: dict) -> CmdErase: + """Returns a CmdErase object initialized based on cmd_args. + + The erase statement inserts a bootloader command to erase the flash memory. + There are two forms of the erase statement. The simplest form (erase all) + creates a command that erases the available flash memory. 
+ The actual effect of this command depends on the runtime settings + of the bootloader and whether + the bootloader resides in the flash, ROM, or RAM. + + Example: + section (0){ + // Erase all + erase all; + // Erase unsecure all + erase unsecure all; + // erase statements specifying memory ID and range + erase @8 all; + erase @288 0x8001000..0x80074A4; + erase sdcard 0x8001000..0x80074A4; + erase mmccard 0x8001000..0x80074A4; + } + + :param cmd_args: dictionary holding path to address, length and flags + :return: CmdErase object + """ + address = value_to_int(cmd_args["address"]) + length = value_to_int(cmd_args.get("length", 0)) + flags = cmd_args.get("flags", 0) + + mem_opt = cmd_args.get("mem_opt") + mem_id = 0 + if mem_opt: + mem_id = self.get_mem_id(mem_opt) + + return CmdErase(address=address, length=length, flags=flags, mem_id=mem_id) + + def _enable(self, cmd_args: dict) -> CmdMemEnable: + """Returns a CmdEnable object initialized based on cmd_args. + + Enable statement is used for initialization of external memories + using a parameter block that was previously loaded to RAM. + + Example: + section (0){ + # Load quadspi config block bin file to RAM, use it to enable QSPI. + load myBinFile > 0x20001000; + enable qspi 0x20001000; + } + + :param cmd_args: dictionary holding address, size and memory type + :return: CmdEnable object + """ + address = value_to_int(cmd_args["address"]) + size = cmd_args.get("size", 4) + mem_opt = cmd_args.get("mem_opt") + mem_id = 0 + if mem_opt: + mem_id = self.get_mem_id(mem_opt) + return CmdMemEnable(address=address, size=size, mem_id=mem_id) + + def _encrypt(self, cmd_args: dict) -> CmdLoad: + """Returns a CmdLoad object initialized based on cmd_args. + + Encrypt holds an ID, which is a reference to keyblob to be used for + encryption. So the encrypt command requires a list of keyblobs, the keyblob + ID and load command. + + e.g. 
+ encrypt (0){ + load myImage > 0x0810000; + } + + :param cmd_args: dictionary holding list of keyblobs, keyblob ID and load dict + :raises SPSDKError: If keyblob to be used is not in the list or is invalid + :return: CmdLoad object + """ + keyblob_id = cmd_args["keyblob_id"] + keyblobs = cmd_args.get("keyblobs", []) + + address = value_to_int(cmd_args["address"]) + + if cmd_args.get("file"): + data = load_binary(cmd_args["file"], self.search_paths) + if cmd_args.get("values"): + values = [int(s, 16) for s in cmd_args["values"].split(",")] + data = struct.pack(f"<{len(values)}L", *values) + + try: + valid_keyblob = self._validate_keyblob(keyblobs, keyblob_id) + except SPSDKError as exc: + raise SPSDKError(f"Invalid key blob {str(exc)}") from exc + + if valid_keyblob is None: + raise SPSDKError(f"Missing keyblob {keyblob_id} for encryption.") + + start_addr = value_to_int(valid_keyblob["keyblob_content"][0]["start"]) + end_addr = value_to_int(valid_keyblob["keyblob_content"][0]["end"]) + key = bytes.fromhex(valid_keyblob["keyblob_content"][0]["key"]) + counter = bytes.fromhex(valid_keyblob["keyblob_content"][0]["counter"]) + byte_swap = valid_keyblob["keyblob_content"][0].get("byte_swap", False) + + keyblob = KeyBlob( + start_addr=start_addr, end_addr=end_addr, key=key, counter_iv=counter + ) + + # Encrypt only if the ADE and VLD flags are set + if bool(end_addr & keyblob.KEY_FLAG_ADE) and bool( + end_addr & keyblob.KEY_FLAG_VLD + ): + encoded_data = keyblob.encrypt_image( + base_address=address, data=align_block(data, 512), byte_swap=byte_swap + ) + else: + encoded_data = data + + return CmdLoad(address, encoded_data) + + def _keywrap(self, cmd_args: dict) -> CmdLoad: + """Returns a CmdLoad object initialized based on cmd_args. + + Keywrap holds keyblob ID to be encoded by a value stored in load command and + stored to address defined in the load command. + + Example: + keywrap (0) { + load {{ 00000000 }} > 0x08000000; + } + + :param cmd_args: dictionary holding list of keyblobs, keyblob ID and load dict + :raises SPSDKError: If keyblob to be used is not in the list or is invalid + :return: CmdLoad object + """ + # iterate over keyblobs + keyblobs = cmd_args.get("keyblobs", None) + keyblob_id = cmd_args.get("keyblob_id", None) + + address = value_to_int(cmd_args["address"]) + otfad_key = cmd_args["values"] + + try: + valid_keyblob = self._validate_keyblob(keyblobs, keyblob_id) + except SPSDKError as exc: + raise SPSDKError(f" Key blob validation failed: {str(exc)}") from exc + if valid_keyblob is None: + raise SPSDKError(f"Missing keyblob {keyblob_id} for given keywrap") + + start_addr = value_to_int(valid_keyblob["keyblob_content"][0]["start"]) + end_addr = value_to_int(valid_keyblob["keyblob_content"][0]["end"]) + key = bytes.fromhex(valid_keyblob["keyblob_content"][0]["key"]) + counter = bytes.fromhex(valid_keyblob["keyblob_content"][0]["counter"]) + + blob = KeyBlob( + start_addr=start_addr, end_addr=end_addr, key=key, counter_iv=counter + ) + + encoded_keyblob = blob.export(kek=otfad_key) + logger.info(f"Creating wrapped keyblob: \n{str(blob)}") + + return CmdLoad(address=address, data=encoded_keyblob) + + def _keystore_to_nv(self, cmd_args: dict) -> CmdKeyStoreRestore: + """Returns a CmdKeyStoreRestore object initialized with memory type and address. + + The keystore_to_nv statement instructs the bootloader to load the backed up + keystore values back into keystore memory region on non-volatile memory. 
+ + Example: + section (0) { + keystore_to_nv @9 0x8000800; + + :param cmd_args: dictionary holding the memory type and address. + :return: CmdKeyStoreRestore object. + """ + mem_opt = cmd_args["mem_opt"] + address = value_to_int(cmd_args["address"]) + return CmdKeyStoreRestore(address, ExtMemId.from_tag(mem_opt)) + + def _keystore_from_nv(self, cmd_args: dict) -> CmdKeyStoreBackup: + """Returns a CmdKeyStoreBackup object initialized with memory type and address. + + The keystore_from_nv statement instructs the bootloader to back up the + keystore values from the keystore memory region on non-volatile memory to the given address. + + Example: + section (0) { + keystore_from_nv @9 0x8000800; + + :param cmd_args: dictionary holding the memory type and address. + :return: CmdKeyStoreBackup object. + """ + mem_opt = cmd_args["mem_opt"] + address = value_to_int(cmd_args["address"]) + return CmdKeyStoreBackup(address, ExtMemId.from_tag(mem_opt)) + + def _version_check(self, cmd_args: dict) -> CmdVersionCheck: + """Returns a CmdVersionCheck object initialized with version check type and version. + + Validates the version of secure or non-secure firmware against the value stored in the OTP or PFR, + to prevent FW rollback. + The command fails if the version provided in the command is lower than the version stored in the OTP/PFR. + + Example: + section (0) { + version_check sec 0x2; + version_check nsec 2; + } + + :param cmd_args: dictionary holding the version type and fw version. + :return: CmdVersionCheck object. + """ + ver_type = cmd_args["ver_type"] + fw_version = cmd_args["fw_version"] + return CmdVersionCheck(VersionCheckType.from_tag(ver_type), fw_version) + + def _validate_keyblob(self, keyblobs: List, keyblob_id: Number) -> Optional[Dict]: + """Checks whether a keyblob is valid. + + Parser returns a list of dicts which contains keyblob definitions. These + definitions should contain 'start', 'end', 'key' & 'counter' keys with + appropriate values. To be able to create a keyblob, we need these four + values. Otherwise we throw an exception that the keyblob is invalid. + + :param keyblobs: list of dicts defining keyblobs + :param keyblob_id: id of keyblob we want to check + :raises SPSDKError: If the keyblob definition is empty + :raises SPSDKError: If the keyblob definition is missing one key + :return: keyblob if it exists and is valid, None otherwise + """ + for keyblob in keyblobs: + if keyblob_id == keyblob["keyblob_id"]: + kb_content = keyblob["keyblob_content"] + if len(kb_content) == 0: + raise SPSDKError(f"Keyblob {keyblob_id} definition is empty!") + + for key in ["start", "end", "key", "counter"]: + if key not in kb_content[0]: + raise SPSDKError( + f"Keyblob {keyblob_id} is missing '{key}' definition!" + ) + + return keyblob + + return None + + def _jump(self, cmd_args: dict) -> CmdJump: + """Returns a CmdJump object initialized with address, argument and optional stack-pointer register. + + The "jump" command produces the ROM_JUMP_CMD. + See the boot image format design document for specific details about these commands, + such as the function prototypes they expect. + Jump to entrypoint is not supported. Only fixed address is supported. + + Example: + section (0) { + # jump to a fixed address + jump 0xffff0000; + } + + :param cmd_args: dictionary holding the argument and address. + :return: CmdJump object.
+ """ + argument = cmd_args.get("argument", 0) + address = value_to_int(cmd_args["address"]) + spreg = cmd_args.get("spreg") + + return CmdJump(address, argument, spreg) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sections.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sections.py new file mode 100644 index 00000000..4fe41a88 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sections.py @@ -0,0 +1,397 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Sections within SBfile.""" + +from struct import unpack_from +from typing import Iterator, List, Optional + +from ...crypto.hmac import hmac +from ...crypto.symmetric import Counter, aes_ctr_decrypt, aes_ctr_encrypt +from ...exceptions import SPSDKError +from ...sbfile.misc import SecBootBlckSize +from ...utils.abstract import BaseClass +from ...utils.crypto.cert_blocks import CertBlockV1 +from .commands import ( + CmdBaseClass, + CmdHeader, + EnumCmdTag, + EnumSectionFlag, + parse_command, +) + +######################################################################################################################## +# Boot Image Sections +######################################################################################################################## + + +class BootSectionV2(BaseClass): + """Boot Section V2.""" + + HMAC_SIZE = 32 + + @property + def uid(self) -> int: + """Boot Section UID.""" + return self._header.address + + @uid.setter + def uid(self, value: int) -> None: + self._header.address = value + + @property + def is_last(self) -> bool: + """Check whether the section is the last one.""" + return self._header.flags & EnumSectionFlag.LAST_SECT.tag != 0 + + @is_last.setter + def is_last(self, value: bool) -> None: + assert isinstance(value, bool) + self._header.flags = EnumSectionFlag.BOOTABLE.tag + if value: + self._header.flags |= EnumSectionFlag.LAST_SECT.tag + + @property + def hmac_count(self) -> int: + """Number of HMACs.""" + raw_size = 0 + hmac_count = 0 + for cmd in self._commands: + raw_size += cmd.raw_size + if raw_size > 0: + block_count = (raw_size + 15) // 16 + hmac_count = ( + self._hmac_count if block_count >= self._hmac_count else block_count + ) + return hmac_count + + @property + def raw_size(self) -> int: + """Raw size of section.""" + size = CmdHeader.SIZE + self.HMAC_SIZE + size += self.hmac_count * self.HMAC_SIZE + for cmd in self._commands: + size += cmd.raw_size + if size % 16: + size += 16 - (size % 16) + return size + + def __init__(self, uid: int, *commands: CmdBaseClass, hmac_count: int = 1) -> None: + """Initialize BootSectionV2. 
+ + :param uid: section unique identification + :param commands: List of commands + :param hmac_count: The number of HMAC entries + """ + self._header = CmdHeader(EnumCmdTag.TAG.tag, EnumSectionFlag.BOOTABLE.tag) + self._commands: List[CmdBaseClass] = [] + self._hmac_count = hmac_count + for cmd in commands: + self.append(cmd) + # Initialize HMAC count + if not isinstance(self._hmac_count, int) or self._hmac_count == 0: + self._hmac_count = 1 + # section UID + self.uid = uid + + def __len__(self) -> int: + return len(self._commands) + + def __getitem__(self, key: int) -> CmdBaseClass: + return self._commands[key] + + def __setitem__(self, key: int, value: CmdBaseClass) -> None: + self._commands[key] = value + + def __iter__(self) -> Iterator[CmdBaseClass]: + return self._commands.__iter__() + + def append(self, cmd: CmdBaseClass) -> None: + """Add command to section.""" + assert isinstance(cmd, CmdBaseClass) + self._commands.append(cmd) + + def __repr__(self) -> str: + return f"BootSectionV2: {len(self)} commands." + + def __str__(self) -> str: + """Get object info.""" + nfo = "" + for index, cmd in enumerate(self._commands): + nfo += f" {index}) {str(cmd)}\n" + return nfo + + # pylint: disable=too-many-locals + def export( + self, + dek: bytes = b"", + mac: bytes = b"", + counter: Optional[Counter] = None, + ) -> bytes: + """Serialize Boot Section object. + + :param dek: The DEK value in bytes (required) + :param mac: The MAC value in bytes (required) + :param counter: The counter object (required) + :return: exported bytes + :raises SPSDKError: raised when dek, mac, counter have invalid format or no commands + """ + if not isinstance(dek, bytes): + raise SPSDKError("Invalid type of dek, should be bytes") + if not isinstance(mac, bytes): + raise SPSDKError("Invalid type of mac, should be bytes") + if not isinstance(counter, Counter): + raise SPSDKError("Invalid type of counter") + if not self._commands: + raise SPSDKError("SB2 must contain commands") + # Export commands + commands_data = b"" + for cmd in self._commands: + cmd_data = cmd.export() + commands_data += cmd_data + if len(commands_data) % 16: + commands_data += b"\x00" * (16 - (len(commands_data) % 16)) + # Encrypt header + self._header.data = self.hmac_count + self._header.count = len(commands_data) // 16 + encrypted_header = aes_ctr_encrypt(dek, self._header.export(), counter.value) + hmac_data = hmac(mac, encrypted_header) + counter.increment(1 + (self.hmac_count + 1) * 2) + + # Encrypt commands + encrypted_commands = b"" + for index in range(0, len(commands_data), 16): + encrypted_block = aes_ctr_encrypt( + dek, commands_data[index : index + 16], counter.value + ) + encrypted_commands += encrypted_block + counter.increment() + # Calculate HMAC of commands + index = 0 + hmac_count = self._header.data + block_size = (self._header.count // hmac_count) * 16 + while hmac_count > 0: + enc_block = ( + encrypted_commands[index:] + if hmac_count == 1 + else encrypted_commands[index : index + block_size] + ) + hmac_data += hmac(mac, enc_block) + hmac_count -= 1 + index += len(enc_block) + return encrypted_header + hmac_data + encrypted_commands + + # pylint: disable=too-many-locals + @classmethod + def parse( + cls, + data: bytes, + offset: int = 0, + plain_sect: bool = False, + dek: bytes = b"", + mac: bytes = b"", + counter: Optional[Counter] = None, + ) -> "BootSectionV2": + """Parse Boot Section from bytes. 
+ + :param data: Raw data of parsed image + :param offset: The offset of input data + :param plain_sect: If the sections are not encrypted; It is used for debugging only, not supported by ROM code + :param dek: The DEK value in bytes (required) + :param mac: The MAC value in bytes (required) + :param counter: The counter object (required) + :return: exported bytes + :raises SPSDKError: raised when dek, mac, counter have invalid format + """ + if not isinstance(dek, bytes): + raise SPSDKError("Invalid type of dek, should be bytes") + if not isinstance(mac, bytes): + raise SPSDKError("Invalid type of mac, should be bytes") + if not isinstance(counter, Counter): + raise SPSDKError("Invalid type of counter") + # Get Header specific data + header_encrypted = data[offset : offset + CmdHeader.SIZE] + header_hmac_data = data[ + offset + CmdHeader.SIZE : offset + CmdHeader.SIZE + cls.HMAC_SIZE + ] + offset += CmdHeader.SIZE + cls.HMAC_SIZE + # Check header HMAC + if header_hmac_data != hmac(mac, header_encrypted): + raise SPSDKError("Invalid header HMAC") + # Decrypt header + header_decrypted = aes_ctr_decrypt(dek, header_encrypted, counter.value) + counter.increment() + # Parse header + header = CmdHeader.parse(header_decrypted) + counter.increment((header.data + 1) * 2) + # Get HMAC data + hmac_data = data[offset : offset + (cls.HMAC_SIZE * header.data)] + offset += cls.HMAC_SIZE * header.data + encrypted_commands = data[offset : offset + (header.count * 16)] + # Check HMAC + hmac_index = 0 + hmac_count = header.data + block_size = (header.count // hmac_count) * 16 + section_size = header.count * 16 + while hmac_count > 0: + if hmac_count == 1: + block_size = section_size + hmac_block = hmac(mac, data[offset : offset + block_size]) + if hmac_block != hmac_data[hmac_index : hmac_index + cls.HMAC_SIZE]: + raise SPSDKError("HMAC failed") + hmac_count -= 1 + hmac_index += cls.HMAC_SIZE + section_size -= block_size + offset += block_size + # Decrypt commands + decrypted_commands = b"" + for hmac_index in range(0, len(encrypted_commands), 16): + encr_block = encrypted_commands[hmac_index : hmac_index + 16] + decrypted_block = ( + encr_block + if plain_sect + else aes_ctr_decrypt(dek, encr_block, counter.value) + ) + decrypted_commands += decrypted_block + counter.increment() + # ... 
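+        # Rebuild the section object from the decrypted command stream: parse
+        # one command at a time until all decrypted bytes have been consumed.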
+        cmd_offset = 0
+        obj = cls(header.address, hmac_count=header.data)
+        while cmd_offset < len(decrypted_commands):
+            cmd_obj = parse_command(decrypted_commands[cmd_offset:])
+            cmd_offset += cmd_obj.raw_size
+            obj.append(cmd_obj)
+        return obj
+
+
+class CertSectionV2(BaseClass):
+    """Certificate Section V2 class."""
+
+    HMAC_SIZE = 32
+    # Section mark "sign" interpreted as a little-endian 32-bit integer
+    SECT_MARK = unpack_from("<I", b"sign")[0]
+
+    @property
+    def cert_block(self) -> CertBlockV1:
+        """Return certification block."""
+        return self._cert_block
+
+    @property
+    def raw_size(self) -> int:
+        """Calculate raw size of section."""
+        # Section header size
+        size = CmdHeader.SIZE
+        # Header HMAC 32 bytes + Certificate block HMAC 32 bytes
+        size += self.HMAC_SIZE * 2
+        # Certificate block size in bytes
+        size += self.cert_block.raw_size
+        return size
+
+    def __init__(self, cert_block: CertBlockV1):
+        """Initialize CertSectionV2."""
+        assert isinstance(cert_block, CertBlockV1)
+        self._header = CmdHeader(
+            EnumCmdTag.TAG.tag,
+            EnumSectionFlag.CLEARTEXT.tag | EnumSectionFlag.LAST_SECT.tag,
+        )
+        self._header.address = self.SECT_MARK
+        self._header.count = cert_block.raw_size // 16
+        self._header.data = 1
+        self._cert_block = cert_block
+
+    def __repr__(self) -> str:
+        return f"CertSectionV2: Length={self._header.count * 16}"
+
+    def __str__(self) -> str:
+        """Get object info."""
+        return str(self.cert_block)
+
+    def export(
+        self, dek: bytes = b"", mac: bytes = b"", counter: Optional[Counter] = None
+    ) -> bytes:
+        """Serialize Certificate Section object.
+
+        :param dek: The DEK value in bytes (required)
+        :param mac: The MAC value in bytes (required)
+        :param counter: The counter object (required)
+        :return: exported bytes
+        :raises SPSDKError: raised when dek, mac, counter have invalid format
+        :raises SPSDKError: raised when the size of the exported bytes is invalid
+        """
+        if not isinstance(dek, bytes):
+            raise SPSDKError("DEK value is not in bytes")
+        if not isinstance(mac, bytes):
+            raise SPSDKError("MAC value is not in bytes")
+        if not isinstance(counter, Counter):
+            raise SPSDKError("Counter value is incorrect")
+        # Prepare Header data
+        header_data = self._header.export()
+        header_encrypted = aes_ctr_encrypt(dek, header_data, counter.value)
+        # counter.increment()
+        # Prepare Certificate Block data
+        body_data = self.cert_block.export()
+        # Prepare HMAC data
+        hmac_data = hmac(mac, header_encrypted)
+        hmac_data += hmac(mac, body_data)
+        result = header_encrypted + hmac_data + body_data
+        if len(result) != self.raw_size:
+            raise SPSDKError("Invalid size")
+        return result
+
+    @classmethod
+    def parse(
+        cls,
+        data: bytes,
+        offset: int = 0,
+        dek: bytes = b"",
+        mac: bytes = b"",
+        counter: Optional[Counter] = None,
+    ) -> "CertSectionV2":
+        """Parse Certificate Section from bytes array.
+ + :param data: Raw data of parsed image + :param offset: The offset of input data + :param dek: The DEK value in bytes (required) + :param mac: The MAC value in bytes (required) + :param counter: The counter object (required) + :return: parsed cert section v2 object + :raises SPSDKError: Raised when dek, mac, counter are not valid + :raises SPSDKError: Raised when there is invalid header HMAC, TAG, FLAGS, Mark + :raises SPSDKError: Raised when there is invalid certificate block HMAC + """ + if not isinstance(dek, bytes): + raise SPSDKError("DEK value has invalid format") + if not isinstance(mac, bytes): + raise SPSDKError("MAC value has invalid format") + if not isinstance(counter, Counter): + raise SPSDKError("Counter value has invalid format") + index = offset + header_encrypted = data[index : index + CmdHeader.SIZE] + index += CmdHeader.SIZE + header_hmac = data[index : index + cls.HMAC_SIZE] + index += cls.HMAC_SIZE + cert_block_hmac = data[index : index + cls.HMAC_SIZE] + index += cls.HMAC_SIZE + if header_hmac != hmac(mac, header_encrypted): + raise SPSDKError("Invalid Header HMAC") + header_encrypted = aes_ctr_decrypt(dek, header_encrypted, counter.value) + header = CmdHeader.parse(header_encrypted) + if header.tag != EnumCmdTag.TAG: + raise SPSDKError(f"Invalid Header TAG: 0x{header.tag:02X}") + if header.flags != ( + EnumSectionFlag.CLEARTEXT.tag | EnumSectionFlag.LAST_SECT.tag + ): + raise SPSDKError(f"Invalid Header FLAGS: 0x{header.flags:02X}") + if header.address != cls.SECT_MARK: + raise SPSDKError(f"Invalid Section Mark: 0x{header.address:08X}") + # Parse Certificate Block + cert_block = CertBlockV1.parse(data[index:]) + if cert_block_hmac != hmac(mac, data[index : index + cert_block.raw_size]): + raise SPSDKError("Invalid Certificate Block HMAC") + index += cert_block.raw_size + cert_section_obj = cls(cert_block) + counter.increment(SecBootBlckSize.to_num_blocks(index - offset)) + return cert_section_obj diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sly_bd_lexer.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sly_bd_lexer.py new file mode 100644 index 00000000..594502ce --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sly_bd_lexer.py @@ -0,0 +1,366 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2021-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Lexer for command (BD) files used by parser.""" + +from typing import List, Union + +from sly import Lexer +from sly.lex import Token + + +# pylint: disable=undefined-variable,invalid-name,no-self-use +# undefined-variable : the lexer uses '_' as a decorator, which throws undefined +# variable error. We can't do much with it. +# invalid-name : tokens are defined as upper case. However this violates the +# snake cae naming style. We can't do much, as this is required by the lexer. +# no-self-use : the public methods must be defined as class methods although +# the self is not used at all. +class Variable: + """Class representing a variable in command file.""" + + def __init__(self, name: str, token: str, value: Union[str, int, float]) -> None: + """Initializer. + + :param name: name of identifier (variable) + :param token: type of variable (option, constant etc.) + :param value: the content of the variable + """ + self.name = name + self.t = token + self.value = value + + def __str__(self) -> str: + """Returns a string with variable info. + + i.e. 
+ ", , " + + :return: string + """ + return f"{self.name}, {self.t}, {self.value}" + + +class BDLexer(Lexer): # type: ignore + """Lexer for bd files.""" + + def __init__(self) -> None: + """Initializer.""" + self._sources: List[Variable] = [] + + def cleanup(self) -> None: + """Resets the lexers internals into initial state.""" + self._sources.clear() + + def add_source(self, source: Variable) -> None: + """Append an identifier of source type into list. + + :param source: identifier defined under sources block in BD file + """ + self._sources.append(source) + + # List of reserved keywords + reserved = { + "call": "CALL", + "constants": "CONSTANTS", + "extern": "EXTERN", + "erase": "ERASE", + "false": "FALSE", + "filters": "FILTERS", + "from": "FROM", + "jump": "JUMP", + "load": "LOAD", + "mode": "MODE", + "else": "ELSE", + "info": "INFO", + "error": "ERROR", + "enable": "ENABLE", + "keywrap": "KEYWRAP", + "keystore_to_nv": "KEYSTORE_TO_NV", + "keystore_from_nv": "KEYSTORE_FROM_NV", + "all": "ALL", + "no": "NO", + "options": "OPTIONS", + "raw": "RAW", + "section": "SECTION", + "sources": "SOURCES", + "switch": "SWITCH", + "true": "TRUE", + "yes": "YES", + "if": "IF", + "defined": "DEFINED", + "warning": "WARNING", + "sizeof": "SIZEOF", + "unsecure": "UNSECURE", + "jump_sp": "JUMP_SP", + "keyblob": "KEYBLOB", + "reset": "RESET", + "encrypt": "ENCRYPT", + "version_check": "VERSION_CHECK", + "sec": "SEC", + "nsec": "NSEC", + } + + # List of token names. This is always required + tokens = [ + "COMMENT", + "IDENT", + "SOURCE_NAME", + "BINARY_BLOB", + "INT_LITERAL", + "STRING_LITERAL", + "RANGE", + "ASSIGN", + "INT_SIZE", + "SECTION_NAME", + #'SYMBOL_REF', replaced with a non-terminal symbol_ref + # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=) + "PLUS", + "MINUS", + "TIMES", + "DIVIDE", + "MOD", + "OR", + "AND", + "NOT", + "XOR", + "LSHIFT", + "RSHIFT", + "LOR", + "LAND", + "LNOT", + "LT", + "LE", + "GT", + "GE", + "EQ", + "NE", + # Delimiters ( ) { } , . ; : + "LPAREN", + "RPAREN", + "LBRACE", + "RBRACE", + "COMMA", + "PERIOD", + "SEMI", + "COLON", + # Special characters + "QUESTIONMARK", + "DOLLAR", + ] + list(reserved.values()) + + literals = {"@"} + + # A regular expression rules with some action code + # The order of these functions MATTER!!! Make sure you know what you are + # doing, when changing the order of function declarations!!! + @_(r"(//.*)|(/\*(.|\s)*?\*/)|(\#.*)") # type: ignore + def COMMENT(self, token: Token) -> None: + """Token rule to detect comments (including multiline). + + Allowed comments are C/C++ like comments '/* */', '//' and bash-like + comments starting with '#'. + + :param token: token matching a comment + """ + # Multiline comments are counted as a single line. This causes us troubles + # in t_newline(), which treats the multiline comment as a single line causing + # a mismatch in the final line position. + # From this perspective we increment the linenumber here by the total + # number of lines - 1 (the subtracted 1 gets counted byt t_newline) + self.lineno += len(token.value.split("\n")) - 1 + + # It's not possible to detect INT_SIZE token while whitespaces are present between period and + # letter in real use case, because of regex engine limitation in positive lookbehind. + @_(r"(?<=(\d|[0-9a-fA-F])\.)[ \t]*[whb]") # type: ignore + def INT_SIZE(self, token: Token) -> Token: + """Token rule to detect numbers appended with w/h/b. 
+ + Example: + my_number = 4.b + my_number = 1.h + my_number = 3.w + + The w/h/b defines size (Byte, Halfword, Word). This should be taken into + account during number computation. + + :param token: token matching int size + + :return: Token representing the size of int literal + """ + return token + + @_(r"[_a-zA-Z][_a-zA-Z0-9]*") # type: ignore + def IDENT(self, token: Token) -> Token: + """Token rule to detect identifiers. + + A valid identifier can start either with underscore or a letter followed + by any numbers of underscores, letters and numbers. + + If the name of an identifier is from the set of reserved keywords, the + token type is replaced with the keyword name, otherwise the token is + of type 'IDENT'. + Values of type TRUE/YES, FALSE/NO are replaces by 1 or 0 respectively. + + :param token: token matching an identifier pattern + :return: Token representing identifier + """ + # it may happen that we find an identifier, which is a keyword, in such + # a case remap the type from IDENT to reserved word (i.e. keyword) + token_type = self.reserved.get(token.value, "IDENT") + if token_type in ["TRUE", "YES"]: + token.type = "INT_LITERAL" + token.value = 1 + elif token_type in ["FALSE", "NO"]: + token.type = "INT_LITERAL" + token.value = 0 + else: + token.type = token_type + # check, whether the identifier is under sources, in such case + # change the type to SOURCE_NAME + for source in self._sources: + if source.name == token.value: + token.type = "SOURCE_NAME" + break + return token + + @_(r"\b([0-9]+[K]?|0[xX][0-9a-fA-F]+)\b|'.*'") # type: ignore + def INT_LITERAL(self, token: Token) -> Token: + """Token rule to detect integer literals. + + An int literal may be represented as a number in decimal form appended + with a 'K' or number in hexadecimal form. + + Example: + 1024 + 1K # same as above + -256 + 0x25 + + Lexer converts the detected string into a number. String literals + appended with 'K' are multiplied by 1024. + + :param token: token matching integer literal pattern + :return: Token representing integer literal + """ + number = token.value + if number[0] == "'" and number[-1] == "'": + # transform 'dude' into '0x64756465' + number = "0x" + bytearray(number[1:-1], "utf-8").hex() + number = int(number, 0) + elif number[-1] == "K": + number = int(number[:-1], 0) * 1024 + else: + number = int(number, 0) + + token.value = number + return token + + @_(r"\$[\w\.\*\?\-\^\[\]]+") # type: ignore + def SECTION_NAME(self, token: Token) -> Token: + """Token rule to detect section names. + + Section names start with a dollar sign ($) glob-type expression that + can match any number of ELF sections. + + Example: + $section_[ab] + $math* + + :param token: token matching section name pattern + :return: Token representing section name + """ + return token + + @_(r"\{\{([0-9a-fA-F]{2}| )+\}\}") # type: ignore + def BINARY_BLOB(self, token: Token) -> Token: + """Token rule to detect binary blob. + + A binary blob is a sequence of hexadecimal bytes in double curly braces. + + Example: + {{aa bb cc 1F 3C}} + + :param token: token matching binary blob pattern + :return: Token representing binary blob + """ + # return just the content between braces + value = token.value[2:-2] + + token.value = "".join(value.split()) + return token + + # A string containing ignored characters (spaces and tabs) + ignore = " \t" + + @_(r"\n") # type: ignore + def newline(self, token: Token) -> None: + """Token rule to detect new lines. + + On new line character the line number count is incremented. 
+ + :param token: token matching new line character + """ + self.lineno += len(token.value) + + # Operators regular expressions + PLUS = r"\+" + MINUS = r"-" + TIMES = r"\*" + DIVIDE = r"/" + MOD = r"%" + NOT = r"~" + XOR = r"\^" + LSHIFT = r"<<" + RSHIFT = r">>" + LOR = r"\|\|" + OR = r"\|" + LAND = r"&&" + AND = r"&" + LE = r"<=" + LT = r"<" + GE = r">=" + GT = r">" + EQ = r"==" + NE = r"!=" + LNOT = r"!" + + # Tokens regular expressions + STRING_LITERAL = r"\".*\"" + RANGE = r"\.\." + + # Assignment operator regular expressions + ASSIGN = r"=" + + # Delimiters regular expressions + LPAREN = r"\(" + RPAREN = r"\)" + LBRACE = r"\{" + RBRACE = r"\}" + COMMA = r"," + PERIOD = r"\." + SEMI = r";" + COLON = r":" + + # Special characters + QUESTIONMARK = r"\?" + DOLLAR = r"\$" + + # Error handling rule + def error(self, t: Token) -> Token: + """Token error handler. + + The lexing index is incremented so lexing can continue, however, an + error token is returned. The token contains the whole text starting + with the detected error. + + :param t: invalid token. + :return: the invalid token. + """ + self.index += 1 + t.value = t.value[0] + return t diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sly_bd_parser.py b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sly_bd_parser.py new file mode 100644 index 00000000..d8281180 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/sbfile/sb2/sly_bd_parser.py @@ -0,0 +1,1562 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2021-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module implementing command (BD) file parser.""" + +import logging +from numbers import Number +from typing import Any, Dict, List, Optional + +from sly import Parser +from sly.lex import Token +from sly.yacc import YaccProduction + +from ...exceptions import SPSDKError +from . import sly_bd_lexer as bd_lexer + + +# pylint: disable=too-many-public-methods,too-many-lines +# too-many-public-methods : every method in the parser represents a syntax rule, +# this is necessary and thus can't be omitted. From this perspective this check +# is disabled. +# too-many-lines : the class can't be shortened, as all the methods represent +# rules. +class BDParser(Parser): # type: ignore + """Command (BD) file parser. + + The parser is based on SLY framework (python implementation of Lex/YACC) + and is used to parse the command file, which serves as an input for nxpimage + utility to create a secure binary in 2.1 format. + See the documentation for details. + """ + + # Import tokens from lexer. This is required by the parser! 
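+    # A minimal usage sketch of this parser (illustrative only; the BD snippet
+    # is an assumed example, not taken from the original sources):
+    #
+    #     parser = BDParser()
+    #     result = parser.parse("options { flags = 0x8; } section (0) {}")
+    #     # result is a dict with "options", "keyblobs", "sections", ...,
+    #     # or None if a syntax error was encountered.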
+ tokens = bd_lexer.BDLexer.tokens + # tokens = BDLexer.tokens + + # Uncomment this line to output parser debug file + # debugfile = "parser.out" + + log = logging.getLogger(__name__) + log.setLevel(logging.ERROR) + + def __init__(self) -> None: + """Initialization method.""" + super().__init__() + self._variables: List[bd_lexer.Variable] = [] + self._sources: List[bd_lexer.Variable] = [] + self._keyblobs: List[Dict] = [] + self._sections: List[bd_lexer.Variable] = [] + self._input: Any = None + self._bd_file: Dict = {} + self._parse_error: bool = False + self._extern: List[str] = [] + self._lexer = bd_lexer.BDLexer() + + def _cleanup(self) -> None: + """Cleans up allocated resources before next parsing.""" + self._variables = [] + self._keyblobs = [] + self._sections = [] + # for some strange reason, mypy assumes this is a redefinition of _input + self._input = None + self._bd_file = {} + self._parse_error = False + self._lexer.cleanup() + + def parse( + self, text: str, extern: Optional[List] = None + ) -> Optional[Dict]: # pylint: disable=arguments-differ + """Parse the `input_text` and returns a dictionary of the file content. + + :param text: command file to be parsed in string format + :param extern: additional files defined on command line + + :return: dictionary of the command file content or None on Syntax error + """ + self._cleanup() + self._extern = extern or [] + # for some strange reason, mypy assumes this is a redefinition of _input + self._input: Any = text # type: ignore + + super().parse(self._lexer.tokenize(text)) + + if self._parse_error is True: + print("BD file parsing not successful.") + return None + + return self._bd_file + + # Operators precedence + precedence = ( + ("left", "LOR"), + ("left", "LAND"), + ("left", "OR"), + ("left", "XOR"), + ("left", "AND"), + ("left", "EQ", "NE"), + ("left", "GT", "GE", "LT", "LE"), + ("left", "LSHIFT", "RSHIFT"), + ("left", "PLUS", "MINUS"), + ("left", "TIMES", "DIVIDE", "MOD"), + ("right", "SIZEOF"), + ("right", "LNOT", "NOT"), + ) + + # pylint: disable=undefined-variable,function-redefined,no-self-use,unused-argument + # undefined-variable : the module uses underscore decorator to define + # each rule, however, this causes issues to mypy and pylint. + # function-redefined : each rule is identified by a function name and a + # decorator. However from code checking tools perspective, this is + # function redefinition. Thus we need to disable this rule as well. + # no-self-use : all 'rules' must be class methods, although they don't use + # self. Thus we need to omit this rule. + # unused-argument : not all token input arguments are always used, especially + # in rules which are not supported. + @_("pre_section_block section_block") # type: ignore + def command_file(self, token: YaccProduction) -> None: + """Parser rule. + + :param token: object holding the content defined in decorator. + """ + token.pre_section_block.update(token.section_block) + self._bd_file.update(token.pre_section_block) + + @_("pre_section_block options_block") # type: ignore + def pre_section_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary defining the presection_block. 
+ """ + options = token.pre_section_block.get("options", {}) + options.update(token.options_block["options"]) + token.pre_section_block["options"] = options + return token.pre_section_block + + @_("pre_section_block constants_block", "pre_section_block sources_block") # type: ignore + def pre_section_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary defining the presection block. + """ + token.pre_section_block.update(token[1]) + return token.pre_section_block + + @_("pre_section_block keyblob_block") # type: ignore + def pre_section_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary defining the presection block. + """ + if token.pre_section_block.get("keyblobs") is None: + token.pre_section_block["keyblobs"] = [] + token.pre_section_block["keyblobs"].append(token.keyblob_block) + return token.pre_section_block + + @_("empty") # type: ignore + def pre_section_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary defining the presection block. + """ + return token.empty + + @_("OPTIONS LBRACE option_def RBRACE") # type: ignore + def options_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary defining the options block. + """ + return token.option_def + + @_("option_def IDENT ASSIGN const_expr SEMI") # type: ignore + def option_def(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding an option definition. + """ + # it appears, that in the option block anything can be defined, so + # we don't check, whether the identifiers defined there are from the + # allowed options anymore. The code is left just as a reminder. + # identifier = token.IDENT + # if identifier in self.allowed_option_identifiers: + # self._variables.append(self.Variable(token.IDENT, "option", token.const_expr)) + # token.option_def["options"].update({token.IDENT : token.const_expr}) + # return token.option_def + # else: + # column = BDParser._find_column(self._input, token) + # print(f"Unknown option in options block at {token.lineno}/{column}: {token.IDENT}") + # self.error(token) + self._variables.append( + bd_lexer.Variable(token.IDENT, "option", token.const_expr) + ) + token.option_def["options"].update({token.IDENT: token.const_expr}) + return token.option_def + + @_("empty") # type: ignore + def option_def(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding an empty option definition. + """ + return {"options": {}} + + @_("CONSTANTS LBRACE constant_def RBRACE") # type: ignore + def constants_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + For now, we don't store the constants in the final bd file. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of constants block. + """ + dictionary: Dict = {} + return dictionary + + @_("constant_def IDENT ASSIGN bool_expr SEMI") # type: ignore + def constant_def(self, token: YaccProduction): + """Parser rule. + + :param token: object holding the content defined in decorator. 
+ """ + self._variables.append( + bd_lexer.Variable(token.IDENT, "constant", token.bool_expr) + ) + + @_("empty") # type: ignore + def constant_def(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding empty constant definition. + """ + return token.empty + + @_("SOURCES LBRACE source_def RBRACE") # type: ignore + def sources_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't store the sources in the final BD file for now. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the definition of sources + """ + sources = {} + for source in self._lexer._sources: + sources[source.name] = source.value + return {"sources": sources} + + @_("source_def IDENT ASSIGN source_value SEMI") # type: ignore + def source_def(self, token: YaccProduction) -> None: + """Parser rule. + + :param token: object holding the content defined in decorator. + """ + new_source = bd_lexer.Variable(token.IDENT, "source", token.source_value) + self._lexer.add_source(new_source) + + @_("source_def IDENT ASSIGN source_value LPAREN source_attr_list RPAREN SEMI") # type: ignore + def source_def(self, token: YaccProduction) -> None: + """Parser rule. + + :param token: object holding the content defined in decorator. + """ + # self._sources.append(self.Variable(token.IDENT, "source", token.source_value)) + error_token = Token() + error_token.lineno = token.lineno + error_token.index = token._slice[4].index + self.error(error_token, ": attribute list is not supported") + + @_("empty") # type: ignore + def source_def(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding empty content. + """ + return token.empty + + @_("STRING_LITERAL") # type: ignore + def source_value(self, token: YaccProduction) -> str: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: string defining the source value. + """ + # Everything we read is a string. But strings already contain double quotes, + # from this perspective we need to remove them, this omit the first and last + # character. + return token.STRING_LITERAL[1:-1] + + @_("EXTERN LPAREN int_const_expr RPAREN") # type: ignore + def source_value(self, token: YaccProduction) -> str: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: string defining a path defined on command line. + """ + if token.int_const_expr > len(self._extern) - 1: + self.error(token, ": extern() out of range") + return "" + return self._extern[token.int_const_expr] + + @_("source_attr COMMA source_attr_list") # type: ignore + def source_attr_list(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: empty dictionary as this is not supported right now. + """ + dictionary = {} + return dictionary + + @_("source_attr") # type: ignore + def source_attr_list(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: returns dictionary holding content of source attribute. + """ + return token.source_attr + + @_("empty") # type: ignore + def source_attr_list(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. 
+ :return: returns dictionary holding content of empty source attribute list. + """ + return {} + + @_("IDENT ASSIGN const_expr") # type: ignore + def source_attr(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of source file attributes. + """ + return {token.IDENT: token.const_expr} + + @_("KEYBLOB LPAREN int_const_expr RPAREN LBRACE keyblob_contents RBRACE") # type: ignore + def keyblob_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of keyblob block. + """ + dictionary = { + "keyblob_id": token.int_const_expr, + "keyblob_content": token.keyblob_contents, + } + dictionary["keyblob_id"] = token.int_const_expr + dictionary["keyblob_content"] = token.keyblob_contents + self._keyblobs.append(dictionary) + return dictionary + + # The legacy tool allowed to have multiple definitions inside a keyblob. + # It has been agreed, that this makes no sense and may be dangerous. + # However, it may happen, that someone comes with a use cases, where legacy + # grammar is needed, thus the code has been left untouched just in case. + # @_("keyblob_contents LPAREN keyblob_options_list RPAREN") + # def keyblob_contents(self, token): + # l = token.keyblob_contents + + # # Append only non-empty options lists to simplify further processing + # if len(token.keyblob_options_list) != 0: + # l.append(token.keyblob_options_list) + # return l + + # @_("empty") + # def keyblob_contents(self, token): + # return [] + + # @_("keyblob_options") + # def keyblob_options_list(self, token): + # return token.keyblob_options + + # @_("empty") + # def keyblob_options_list(self, token): + # # After discussion internal discussion, we will ignore empty definitions in keyblob + # # It's not clear, whether this has some effect on the final sb file or not. + # # C++ elftosb implementation is able to parse the file even without empty + # # parenthesis + # return token.empty + + # @_("IDENT ASSIGN const_expr COMMA keyblob_options") + # def keyblob_options(self, token): + # d = {} + # d[token.IDENT] = token.const_expr + # d.update(token.keyblob_options) + # return d + + # @_("IDENT ASSIGN const_expr") + # def keyblob_options(self, token): + # d = {} + # d[token.IDENT] = token.const_expr + # return d + + # New keyblob grammar! + @_("LPAREN keyblob_options RPAREN") # type: ignore + def keyblob_contents(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: list containing options of each keyblob. + """ + list_ = [token.keyblob_options] + + return list_ + + @_("IDENT ASSIGN const_expr COMMA keyblob_options") # type: ignore + def keyblob_options(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of keyblob options. + """ + dictionary = {} + dictionary[token.IDENT] = token.const_expr + dictionary.update(token.keyblob_options) + return dictionary + + @_("IDENT ASSIGN const_expr") # type: ignore + def keyblob_options(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the keyblob option. 
+ """ + dictionary = {} + dictionary[token.IDENT] = token.const_expr + return dictionary + + @_("section_block SECTION LPAREN int_const_expr section_options RPAREN section_contents") # type: ignore + def section_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a section. + """ + self._sections.append( + { + "section_id": token.int_const_expr, + "options": token.section_options, + "commands": token.section_contents, + } + ) + token.section_block["sections"] += [ + { + "section_id": token.int_const_expr, + "options": token.section_options, + "commands": token.section_contents, + } + ] + return token.section_block + + @_("empty") # type: ignore + def section_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding content of empty section. + """ + token.empty["sections"] = [] + return token.empty + + @_("SEMI section_option_list") # type: ignore + def section_options(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of section options. + """ + return token.section_option_list + + @_("SEMI") # type: ignore + def section_options(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of empty section options. + """ + dictionary = {} + return dictionary + + @_("empty") # type: ignore + def section_options(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of empty section options. + """ + return token.empty + + @_("section_option_list COMMA section_option") # type: ignore + def section_option_list(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of section options. + """ + options = {} + options.update(token.section_option) + if token.section_option_list: + token.section_option_list.append(options) + return token.section_option_list + + @_("section_option") # type: ignore + def section_option_list(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding a section option. + """ + return [token.section_option] + + @_("IDENT ASSIGN const_expr") # type: ignore + def section_option(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a section option. + """ + return {token.IDENT: token.const_expr} + + @_("LBRACE statement RBRACE") # type: ignore + def section_contents(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the section statements. + """ + return token.statement + + @_("LE SOURCE_NAME SEMI") # type: ignore + def section_contents(self, token: YaccProduction) -> None: + """Parser rule. + + :param token: object holding the content defined in decorator. 
+ """ + self.error(token, ": <= syntax is not supported right now.") + + @_("statement basic_stmt SEMI") # type: ignore + def statement(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: list holding section statements. + """ + list_ = [] + token.statement + list_.append(token.basic_stmt) + return list_ + + @_("statement from_stmt") # type: ignore + def statement(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support from_stmt for now. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of from_stmt. + """ + dictionary = {} + return dictionary + + @_("statement if_stmt") # type: ignore + def statement(self, token: YaccProduction) -> None: + """Parser rule. + + We don't support if statements for now. + + :param token: object holding the content defined in decorator. + """ + # return token.statement + token.if_stmt + + @_("statement encrypt_block") # type: ignore + def statement(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: list containing the encrypt statement. + """ + list_ = [] + token.statement + list_.append(token.encrypt_block) + return list_ + + @_("statement keywrap_block") # type: ignore + def statement(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: list containing the keywrap statement. + """ + list_ = [] + token.statement + list_.append(token.keywrap_block) + return list_ + + @_("empty") # type: ignore + def statement(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: empty list. + """ + # return empty statement list + return [] + + @_("KEYWRAP LPAREN int_const_expr RPAREN LBRACE LOAD BINARY_BLOB GT int_const_expr SEMI RBRACE") # type: ignore + def keywrap_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the keywrap block content. + """ + dictionary = {"keywrap": {"keyblob_id": token.int_const_expr0}} + load_cmd = {"address": token.int_const_expr1, "values": token.BINARY_BLOB} + dictionary["keywrap"].update(load_cmd) + return dictionary + + @_("ENCRYPT LPAREN int_const_expr RPAREN LBRACE load_stmt SEMI RBRACE") # type: ignore + def encrypt_block(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the encrypt block content. + """ + dictionary = {"encrypt": {"keyblob_id": token.int_const_expr}} + dictionary["encrypt"].update(token.load_stmt.get("load")) + return dictionary + + @_( # type: ignore + "load_stmt", + "call_stmt", + "jump_sp_stmt", + "mode_stmt", + "message_stmt", + "erase_stmt", + "enable_stmt", + "reset_stmt", + "keystore_stmt", + "version_stmt", + ) + def basic_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of defined statements. + """ + return token[0] + + @_("LOAD load_opt load_data load_target") # type: ignore + def load_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a load statement. 
+ """ + # pattern with load options means load -> program command + if ( + token.load_data.get("pattern") is not None + and token.load_opt.get("load_opt") is None + ): + cmd = "fill" + else: + cmd = "load" + dictionary: Dict = {cmd: {}} + dictionary[cmd].update(token.load_opt) + dictionary[cmd].update(token.load_data) + dictionary[cmd].update(token.load_target) + return dictionary + + @_("empty") # type: ignore + def load_opt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load options. + """ + return token.empty + + @_("'@' int_const_expr") # type: ignore + def load_opt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load options. + """ + return {"load_opt": token.int_const_expr} + + @_("IDENT") # type: ignore + def load_opt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load options. + """ + return {"load_opt": token.IDENT} + + @_("int_const_expr") # type: ignore + def load_data(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load data. + """ + if isinstance(token.int_const_expr, str): + self.error( + token, + f": identifier '{token.int_const_expr}' is not a source identifier.", + ) + retval = {"N/A": "N/A"} + else: + retval = {"pattern": token.int_const_expr} + + return retval + + @_("STRING_LITERAL") # type: ignore + def load_data(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load data. + """ + return {"file": token.STRING_LITERAL[1:-1]} + + @_("SOURCE_NAME") # type: ignore + def load_data(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load data. + """ + for source in self._lexer._sources: + if token.SOURCE_NAME == source.name: + return {"file": source.value} + + # with current implementation, this code won't be ever reached. In case + # a not defined source file is used as `load_data`, the parser detects + # it as a different rule: + # + # load_data ::= int_const_expr + # + # which evaluates as false... however, this fragment is left just in + # in case something changes. + self.error(token, ": source file not defined") + return {"file": "N/A"} + + @_("section_list") # type: ignore + def load_data(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load data. + """ + self.error(token, ": section list is not supported") + dictionary = {} + return dictionary + + @_("section_list FROM SOURCE_NAME") # type: ignore + def load_data(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load data. + """ + self.error(token, "section list using from is not supported") + dictionary = {} + return dictionary + + @_("BINARY_BLOB") # type: ignore + def load_data(self, token: YaccProduction) -> Dict: + """Parser rule. 
+ + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load data. + """ + # no_spaces = "".join(token.BINARY_BLOB.split()) + + return {"values": token.BINARY_BLOB} + + @_("GT PERIOD") # type: ignore + def load_target(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the load target. + """ + self.error(token, ": '.' as load destination is not supported right now") + dictionary = {} + return dictionary + + @_("GT address_or_range") # type: ignore + def load_target(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of load target. + """ + return token.address_or_range + + @_("empty") # type: ignore + def load_target(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + :return: empty dictionary. + """ + self.error(token, ": empty load target is not supported right now.") + return token.empty + + @_("ERASE mem_opt address_or_range") # type: ignore + def erase_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of erase statement. + """ + dictionary: Dict = {token.ERASE: {}} + dictionary[token.ERASE].update(token.address_or_range) + dictionary[token.ERASE].update(token.mem_opt) + return dictionary + + @_("ERASE mem_opt ALL") # type: ignore + def erase_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of erase statement. + """ + dictionary: Dict = {token.ERASE: {"address": 0x00, "flags": 0x01}} + dictionary[token.ERASE].update(token.mem_opt) + return dictionary + + @_("ERASE UNSECURE ALL") # type: ignore + def erase_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of erase statement. + """ + return {"erase": {"address": 0x00, "flags": 0x02}} + + @_("ENABLE mem_opt int_const_expr") # type: ignore + def enable_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of enable statement. + """ + dictionary: Dict = {token.ENABLE: {}} + dictionary[token.ENABLE].update(token.mem_opt) + dictionary[token.ENABLE]["address"] = token.int_const_expr + return dictionary + + @_("section_list COMMA section_ref") # type: ignore + def section_list(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support this rule now. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the section list content. + """ + dictionary = {} + return dictionary + + @_("section_ref") # type: ignore + def section_list(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a section reference. + """ + return token.section_ref + + @_("NOT SECTION_NAME") # type: ignore + def section_ref(self, token: YaccProduction) -> Dict: + """Parser rule. 
+ + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a section reference. + """ + self.error(token, ": section reference is not supported.") + dictionary = {} + return dictionary + + @_("SECTION_NAME") # type: ignore + def section_ref(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a section reference. + """ + self.error(token, ": section reference is not supported.") + return {token.SECTION_NAME} + + @_("int_const_expr") # type: ignore + def address_or_range(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of an address. + """ + address_start = token.int_const_expr + return {"address": address_start} + + @_("int_const_expr RANGE int_const_expr") # type: ignore + def address_or_range(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of an address range. + """ + address_start = token.int_const_expr0 + length = token.int_const_expr1 - address_start + return {"address": address_start, "length": length} + + @_("SOURCE_NAME QUESTIONMARK COLON IDENT") # type: ignore + def symbol_ref(self, token: YaccProduction) -> None: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + """ + self.error(token, ": symbol reference is not supported.") + + @_("call_type call_target call_arg") # type: ignore + def call_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a call statement. + """ + dictionary: Dict = {token.call_type: {}} + dictionary[token.call_type].update(token.call_target) + dictionary[token.call_type].update(token.call_arg) + return dictionary + + @_("CALL", "JUMP") # type: ignore + def call_type(self, token: YaccProduction) -> str: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: string representing 'call' or 'jump' + """ + return token[0] + + @_("int_const_expr") # type: ignore + def call_target(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a call_target. + """ + return {"address": token.int_const_expr} + + @_("SOURCE_NAME") # type: ignore + def call_target(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a call target. + """ + self.error(token, ": source name as call target is not supported.") + dictionary = {} + return dictionary + + @_("symbol_ref") # type: ignore + def call_target(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a call target. + """ + self.error(token, ": symbol reference as call target is not supported.") + dictionary = {} + return dictionary + + @_("LPAREN RPAREN") # type: ignore + def call_arg(self, token: YaccProduction) -> Dict: + """Parser rule. 
+ + :param token: object holding the content defined in decorator. + :return: dictionary holding an empty call argument. + """ + dictionary = {} + return dictionary + + @_("LPAREN int_const_expr RPAREN") # type: ignore + def call_arg(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding a call argument. + """ + return {"argument": token.int_const_expr} + + @_("empty") # type: ignore + def call_arg(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding an empty call argument. + """ + return token.empty + + @_("JUMP_SP int_const_expr call_target call_arg") # type: ignore + def jump_sp_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content jump statement. + """ + dictionary: Dict = {"jump": {}} + dictionary["jump"]["spreg"] = token.int_const_expr + dictionary["jump"].update(token.call_target) + dictionary["jump"].update(token.call_arg) + return dictionary + + @_("RESET") # type: ignore + def reset_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of a reset statement. + """ + return {"reset": {}} + + @_("FROM SOURCE_NAME LBRACE in_from_stmt RBRACE") # type: ignore + def from_stmt(self, token: YaccProduction) -> None: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + """ + self.error(token, ": from statement not supported.") + + @_("basic_stmt SEMI") # type: ignore + def in_from_stmt(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: list of statements. + """ + return token.basic_stmt + + @_("if_stmt") # type: ignore + def in_from_stmt(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: list of statements. + """ + return token.if_stmt + + @_("empty") # type: ignore + def in_from_stmt(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: empty list. + """ + return [] + + @_("MODE int_const_expr") # type: ignore + def mode_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + :return: + """ + self.error(token, ": mode statement is not supported") + dictionary: Dict = {} + return dictionary + + @_("message_type STRING_LITERAL") # type: ignore + def message_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the message statement. + """ + dictionary: Dict = {} + return dictionary + + @_("INFO", "WARNING", "ERROR") # type: ignore + def message_type(self, token: YaccProduction) -> Dict: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + :return: empty dictionary. 
+ """ + self.error(token, ": info/warning/error messages are not supported.") + dictionary: Dict = {} + return dictionary + + @_("KEYSTORE_TO_NV mem_opt address_or_range") # type: ignore + def keystore_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content keystore statement. + """ + dictionary = {token.KEYSTORE_TO_NV: {}} + dictionary[token.KEYSTORE_TO_NV].update(token.mem_opt) + dictionary[token.KEYSTORE_TO_NV].update(token.address_or_range) + return dictionary + + @_("KEYSTORE_FROM_NV mem_opt address_or_range") # type: ignore + def keystore_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content keystore statement. + """ + dictionary = {token.KEYSTORE_FROM_NV: {}} + dictionary[token.KEYSTORE_FROM_NV].update(token.mem_opt) + dictionary[token.KEYSTORE_FROM_NV].update(token.address_or_range) + return dictionary + + @_("IDENT") # type: ignore + def mem_opt(self, token: YaccProduction) -> None: + """Parser rule. + + Unsupported syntax right now. + + :param token: object holding the content defined in decorator. + """ + # search in variables for token.IDENT variable and get it's value + return {"mem_opt": token.IDENT} + + @_("'@' int_const_expr") # type: ignore + def mem_opt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of memory type. + """ + dictionary = {"mem_opt": token.int_const_expr} + return dictionary + + @_("empty") # type: ignore + def mem_opt(self, token: YaccProduction) -> None: + """Parser rule. + + Unsupported syntax right now. + + :param token: object holding the content defined in decorator. + """ + return token.empty + + @_("VERSION_CHECK sec_or_nsec fw_version") # type: ignore + def version_stmt(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of version check statement. + """ + dictionary: Dict = {token.VERSION_CHECK: {}} + dictionary[token.VERSION_CHECK].update(token.sec_or_nsec) + dictionary[token.VERSION_CHECK].update(token.fw_version) + return dictionary + + @_("SEC") # type: ignore + def sec_or_nsec(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of version check type. + """ + dictionary = {"ver_type": 0} + return dictionary + + @_("NSEC") # type: ignore + def sec_or_nsec(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of version check type. + """ + dictionary = {"ver_type": 1} + return dictionary + + @_("int_const_expr") # type: ignore + def fw_version(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: dictionary holding the content of fw version. + """ + dictionary = {"fw_version": token.int_const_expr} + return dictionary + + @_("IF bool_expr LBRACE statement RBRACE else_stmt") # type: ignore + def if_stmt(self, token: YaccProduction) -> List: + """Parser rule. + + We don't support this rule for now. + + :param token: object holding the content defined in decorator. + :return: list of if statements. 
+ """ + self.error(token, ": if & if-else statement is not supported.") + if token.bool_expr: + return token.statement + + return token.else_stmt + + @_("ELSE LBRACE statement RBRACE") # type: ignore + def else_stmt(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: list of else statements. + """ + return token.statement + + @_("ELSE if_stmt") # type: ignore + def else_stmt(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: list of else if statements. + """ + return token.if_stmt + + @_("empty") # type: ignore + def else_stmt(self, token: YaccProduction) -> List: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: empty list. + """ + list_ = [] + return list_ + + @_("STRING_LITERAL") # type: ignore + def const_expr(self, token: YaccProduction) -> str: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: string. + """ + return token.STRING_LITERAL[1:-1] + + @_("bool_expr") # type: ignore + def const_expr(self, token: YaccProduction) -> bool: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: boolean value as a result of constant expression. + """ + return token.bool_expr + + @_("expr") # type: ignore + def int_const_expr(self, token: YaccProduction) -> Number: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: Number as a result of an expression. + """ + return token.expr + + @_("DEFINED LPAREN IDENT RPAREN") # type: ignore + def bool_expr(self, token: YaccProduction) -> bool: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: boolean value as a result if some identifier is defined. + """ + return token.IDENT in self._variables + + @_( # type: ignore + "bool_expr LT bool_expr", + "bool_expr LE bool_expr", + "bool_expr GT bool_expr", + "bool_expr GE bool_expr", + "bool_expr EQ bool_expr", + "bool_expr NE bool_expr", + "bool_expr LAND bool_expr", + "bool_expr LOR bool_expr", + "LPAREN bool_expr RPAREN", + ) + def bool_expr(self, token: YaccProduction) -> bool: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: boolean value as a result of boolean expression. + """ + operator = token[1] + if operator == "<": + return token.bool_expr0 < token.bool_expr1 + if operator == "<=": + return token.bool_expr0 <= token.bool_expr1 + if operator == ">": + return token.bool_expr0 > token.bool_expr1 + if operator == ">=": + return token.bool_expr0 >= token.bool_expr1 + if operator == "==": + return token.bool_expr0 == token.bool_expr1 + if operator == "!=": + return token.bool_expr0 != token.bool_expr1 + if operator == "&&": + return token.bool_expr0 and token.bool_expr1 + if operator == "||": + return token.bool_expr0 or token.bool_expr1 + + return token[1] + + @_("int_const_expr") # type: ignore + def bool_expr(self, token: YaccProduction) -> bool: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: boolean value as a result of a boolean expression. + """ + return token.int_const_expr + + @_("LNOT bool_expr") # type: ignore + def bool_expr(self, token: YaccProduction) -> bool: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: boolean value as a result of logical not expression. 
+        """
+        return not token.bool_expr
+
+    @_("IDENT LPAREN SOURCE_NAME RPAREN")  # type: ignore
+    def bool_expr(self, token: YaccProduction) -> bool:
+        """Parser rule.
+
+        We don't support this rule for now.
+
+        :param token: object holding the content defined in decorator.
+        :return: boolean value (at the moment always False, as not supported).
+        """
+        # I've absolutely no clue, what this rule can mean or be for???
+        self.error(token, ": IDENT ( SOURCE_NAME ) is not supported.")
+        return False
+
+    @_(  # type: ignore
+        "expr PLUS expr",
+        "expr MINUS expr",
+        "expr TIMES expr",
+        "expr DIVIDE expr",
+        "expr MOD expr",
+        "expr LSHIFT expr",
+        "expr RSHIFT expr",
+        "expr AND expr",
+        "expr OR expr",
+        "expr XOR expr",
+        "expr PERIOD INT_SIZE",
+        "LPAREN expr RPAREN",
+    )
+    def expr(self, token: YaccProduction) -> Number:
+        """Parser rule.
+
+        :param token: object holding the content defined in decorator.
+        :return: number as a result of an expression.
+        """
+        operator = token[1]
+        if operator == "+":
+            return token.expr0 + token.expr1
+        if operator == "-":
+            return token.expr0 - token.expr1
+        if operator == "*":
+            return token.expr0 * token.expr1
+        if operator == "/":
+            return token.expr0 // token.expr1
+        if operator == "%":
+            return token.expr0 % token.expr1
+        if operator == "<<":
+            return token.expr0 << token.expr1
+        if operator == ">>":
+            return token.expr0 >> token.expr1
+        if operator == "&":
+            return token.expr0 & token.expr1
+        if operator == "|":
+            return token.expr0 | token.expr1
+        if operator == "^":
+            return token.expr0 ^ token.expr1
+        if operator == ".":
+            char = token.INT_SIZE
+            if char == "w":
+                return token[0] & 0xFFFF
+            if char == "h":
+                return token[0] & 0xFF
+            if char == "b":
+                return token[0] & 0xF
+        # LPAREN expr RPAREN
+        return token[1]
+
+    @_("INT_LITERAL")  # type: ignore
+    def expr(self, token: YaccProduction) -> Number:
+        """Parser rule.
+
+        :param token: object holding the content defined in decorator.
+        :return: integer number as a terminal.
+        """
+        return token.INT_LITERAL
+
+    @_("IDENT")  # type: ignore
+    def expr(self, token: YaccProduction) -> Number:
+        """Parser rule.
+
+        :param token: object holding the content defined in decorator.
+        :return: number stored under identifier.
+        """
+        # we need to convert the IDENT into a value stored under that identifier
+        # search the variables and check, whether there is a name of IDENT
+        for var in self._variables:
+            if var.name == token.IDENT:
+                return var.value
+
+        return token.IDENT
+
+    @_("symbol_ref")  # type: ignore
+    def expr(self, token: YaccProduction) -> None:
+        """Parser rule.
+
+        We don't support this rule for now.
+
+        :param token: object holding the content defined in decorator.
+        """
+        self.error(token, ": symbol reference is not supported.")
+
+    @_("unary_expr")  # type: ignore
+    def expr(self, token: YaccProduction) -> Number:
+        """Parser rule.
+
+        :param token: object holding the content defined in decorator.
+        :return: number as a result of unary expression.
+        """
+        return token.unary_expr
+
+    @_("SIZEOF LPAREN symbol_ref RPAREN")  # type: ignore
+    def expr(self, token: YaccProduction) -> None:
+        """Parser rule.
+
+        We don't support this rule for now.
+
+        :param token: object holding the content defined in decorator.
+        """
+        self.error(token, ": sizeof operator is not supported")
+
+    @_("SIZEOF LPAREN IDENT RPAREN")  # type: ignore
+    def expr(self, token: YaccProduction) -> None:
+        """Parser rule.
+
+        We don't support this rule for now.
+
+        :param token: object holding the content defined in decorator.
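In case it helps while reviewing the vendored grammar, the binary-operator and size-suffix semantics above can be checked in isolation; this is a standalone sketch with my own names (`OPS`, `SIZE_MASKS`), not spsdk API:

```python
# Standalone sketch mirroring BDParser.expr: binary operators dispatch on the middle
# token, division is integer division, and the ".w/.h/.b" suffixes mask the value.
OPS = {
    "+": lambda a, b: a + b,
    "-": lambda a, b: a - b,
    "*": lambda a, b: a * b,
    "/": lambda a, b: a // b,  # integer division, as in the parser
    "%": lambda a, b: a % b,
    "<<": lambda a, b: a << b,
    ">>": lambda a, b: a >> b,
    "&": lambda a, b: a & b,
    "|": lambda a, b: a | b,
    "^": lambda a, b: a ^ b,
}
SIZE_MASKS = {"w": 0xFFFF, "h": 0xFF, "b": 0xF}  # as implemented above

assert OPS["*"](6, 7) == 42
assert OPS["/"](7, 2) == 3
assert 0x12345678 & SIZE_MASKS["w"] == 0x5678
```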
+ """ + self.error(token, ": sizeof operator is not supported") + + @_("PLUS expr", "MINUS expr") # type: ignore + def unary_expr(self, token: YaccProduction) -> Number: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: number as a result of unary expression. + """ + sign = token[0] + number = token.expr + if sign == "-": + number = -number + + return number + + @_("") # type: ignore + def empty(self, token: YaccProduction) -> Dict: + """Parser rule. + + :param token: object holding the content defined in decorator. + :return: empty dictionary. + """ + dictionary: Dict = {} + return dictionary + + @staticmethod + def _find_column(text: str, token: YaccProduction) -> int: + """Finds the column of token in input. + + :param text: input file being parsed + :param token: object holding the content defined in decorator. + :return: column based on token index. + """ + last_cr = text.rfind("\n", 0, token.index) + if last_cr < 0: + last_cr = 0 + else: + last_cr += 1 + column = (token.index - last_cr) + 1 + return column + + @staticmethod + def _find_line(text: str, line_num: int) -> str: + """Finds the line in text based on line number. + + :param text: text to return required line. + :param line_num: line number to return. + :return: line 'line_num" in 'text'. + """ + lines = text.split("\n") + + return lines[line_num] + + def error( + self, token: YaccProduction, msg: str = "" + ) -> YaccProduction: # pylint: disable=arguments-differ + """Syntax error handler. + + On syntax error, we set an error flag and read the rest of input file + until end to terminate the process of parsing. + + :param token: object holding the content defined in decorator. + :param msg: error message to use. + + :raises SPSDKError: Raises error with 'msg' message. 
+ """ + self._parse_error = True + + if token: + lineno = getattr(token, "lineno", -1) + if lineno != -1: + column = BDParser._find_column(self._input, token) + error_line = BDParser._find_line(self._input, lineno - 1) + raise SPSDKError( + f"bdcompiler:{lineno}:{column}: error{msg}\n\n{error_line}\n" + + (column - 1) * " " + + "^\n" + ) + + raise SPSDKError(f"bdcompiler: error{msg}\n") + + raise SPSDKError("bdcompiler: unspecified error.") diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/__init__.py new file mode 100644 index 00000000..99ff7fd3 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2020-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module containing various functions/modules used throughout the SPSDK.""" + +from .exceptions import ( + SPSDKRegsError, + SPSDKRegsErrorBitfieldNotFound, + SPSDKRegsErrorEnumNotFound, + SPSDKRegsErrorRegisterGroupMishmash, + SPSDKRegsErrorRegisterNotFound, +) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/abstract.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/abstract.py new file mode 100644 index 00000000..3c8131c5 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/abstract.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module for base abstract classes.""" + +from abc import ABC, abstractmethod +from typing import Any + +from typing_extensions import Self + + +######################################################################################################################## +# Abstract Class for Data Classes +######################################################################################################################## +class BaseClass(ABC): + """Abstract Class for Data Classes.""" + + def __eq__(self, obj: Any) -> bool: + """Check object equality.""" + return isinstance(obj, self.__class__) and vars(obj) == vars(self) + + def __ne__(self, obj: Any) -> bool: + return not self.__eq__(obj) + + @abstractmethod + def __repr__(self) -> str: + """Object representation in string format.""" + + @abstractmethod + def __str__(self) -> str: + """Object description in string format.""" + + @abstractmethod + def export(self) -> bytes: + """Serialize object into bytes array.""" + + @classmethod + @abstractmethod + def parse(cls, data: bytes) -> Self: + """Deserialize object from bytes array.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/__init__.py new file mode 100644 index 00000000..e9525d40 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2020-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module for cryptographic utilities.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/cert_blocks.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/cert_blocks.py new file mode 100644 index 00000000..df587e66 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/cert_blocks.py @@ -0,0 +1,1859 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module for handling 
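For reviewers unfamiliar with the vendored `BaseClass` contract: `export()` serializes, `parse()` is its inverse, and equality compares all instance attributes via `vars()`. A minimal sketch, where the `Header` class and its byte format are made up for illustration:

```python
from struct import pack, unpack_from

from typing_extensions import Self

from pynitrokey.trussed.bootloader.lpc55_upload.utils.abstract import BaseClass


class Header(BaseClass):
    """Hypothetical 8-byte header, for illustration only."""

    def __init__(self, tag: int, length: int) -> None:
        self.tag = tag
        self.length = length

    def __repr__(self) -> str:
        return f"Header(tag={self.tag}, length={self.length})"

    def __str__(self) -> str:
        return f"Header tag={self.tag:#x}, length={self.length}"

    def export(self) -> bytes:
        return pack("<2I", self.tag, self.length)

    @classmethod
    def parse(cls, data: bytes) -> Self:
        tag, length = unpack_from("<2I", data)
        return cls(tag, length)


header = Header(0x1234, 64)
assert Header.parse(header.export()) == header  # equality comes from BaseClass
```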
Certificate block.""" + +import datetime +import logging +import os +import re +from abc import abstractmethod +from struct import calcsize, pack, unpack_from +from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Type, Union + +from typing_extensions import Self + +from ... import version as spsdk_version +from ...crypto.certificate import Certificate +from ...crypto.hash import EnumHashAlgorithm, get_hash +from ...crypto.keys import PrivateKeyRsa, PublicKeyEcc +from ...crypto.signature_provider import SignatureProvider, get_signature_provider +from ...crypto.types import SPSDKEncoding +from ...crypto.utils import ( + extract_public_key, + extract_public_key_from_data, + get_matching_key_id, +) +from ...exceptions import ( + SPSDKError, + SPSDKNotImplementedError, + SPSDKTypeError, + SPSDKUnsupportedOperation, + SPSDKValueError, +) +from ...utils.abstract import BaseClass +from ...utils.crypto.rkht import RKHTv1, RKHTv21 +from ...utils.database import DatabaseManager, get_db, get_families, get_schema_file +from ...utils.misc import ( + Endianness, + align, + align_block, + change_endianness, + find_file, + load_binary, + load_configuration, + split_data, + value_to_int, + write_file, +) +from ...utils.schema_validator import CommentedConfig + +logger = logging.getLogger(__name__) + + +class CertBlock(BaseClass): + """Common general class for various CertBlocks.""" + + @classmethod + @abstractmethod + def get_supported_families(cls) -> List[str]: + """Get supported families for certification block.""" + + @classmethod + @abstractmethod + def get_validation_schemas(cls) -> List[Dict[str, Any]]: + """Create the list of validation schemas. + + :return: List of validation schemas. + """ + + @staticmethod + @abstractmethod + def generate_config_template(family: Optional[str] = None) -> str: + """Generate configuration for certification block.""" + + @classmethod + @abstractmethod + def from_config( + cls, + config: Dict[str, Any], + search_paths: Optional[List[str]] = None, + ) -> Self: + """Creates an instance of cert block from configuration.""" + + @abstractmethod + def create_config(self, data_path: str) -> str: + """Create configuration of the Certification block Image.""" + + @classmethod + def get_cert_block_class(cls, family: str) -> Type["CertBlock"]: + """Get certification block class by family name. + + :param family: Chip family + :raises SPSDKError: No certification block class found for given family + """ + for cert_block_class in cls.get_cert_block_classes(): + if family in cert_block_class.get_supported_families(): + return cert_block_class + raise SPSDKError( + f"Family '{family}' is not supported in any certification block." + ) + + @classmethod + def get_all_supported_families(cls) -> List[str]: + """Get supported families for all certification blocks except for SRK.""" + families = get_families(DatabaseManager.CERT_BLOCK) + + return [ + family + for family in families + if "srk" + not in get_db(family, "latest").get_str( + DatabaseManager.CERT_BLOCK, "rot_type" + ) + ] + + @classmethod + def get_cert_block_classes(cls) -> List[Type["CertBlock"]]: + """Get list of all cert block classes.""" + return CertBlock.__subclasses__() + + @property + def rkth(self) -> bytes: + """Root Key Table Hash 32-byte hash (SHA-256) of SHA-256 hashes of up to four root public keys.""" + return bytes() + + @classmethod + def _get_supported_families(cls, cert_block_type: str) -> List[str]: + """Get list of supported families. 
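A rough sketch of how the family dispatch is meant to be used; the family string below is an assumption and has to match a name in the bundled device database:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.utils.crypto.cert_blocks import CertBlock

print(CertBlock.get_all_supported_families())           # families known to the database
block_cls = CertBlock.get_cert_block_class("lpc55s6x")  # hypothetical family name
print(block_cls.__name__)                               # -> the matching CertBlock* class
```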
+
+        :param cert_block_type: Type of certification block to look for
+        :return: List of devices that support this cert block
+        """
+        families = cls.get_all_supported_families()
+
+        return [
+            family
+            for family in families
+            if get_db(family, "latest").get_str(DatabaseManager.CERT_BLOCK, "rot_type")
+            == cert_block_type
+        ]
+
+    @classmethod
+    def get_root_private_key_file(cls, config: Dict[str, Any]) -> Optional[str]:
+        """Get main root private key file from config.
+
+        :param config: Configuration to be searched.
+        :return: Root private key file path.
+        """
+        private_key_file = config.get(
+            "signPrivateKey", config.get("mainRootCertPrivateKeyFile")
+        )
+        if private_key_file and not isinstance(private_key_file, str):
+            raise SPSDKTypeError("Root private key file must be a string type")
+        return private_key_file
+
+    @classmethod
+    def find_main_cert_index(
+        cls, config: Dict[str, Any], search_paths: Optional[List[str]] = None
+    ) -> Optional[int]:
+        """Go through all certificates and find the index matching the private key.
+
+        :param config: Configuration to be searched.
+        :param search_paths: List of paths where to search for the file, defaults to None
+        :return: Index of the root certificate matching the signing key, or None if no match is found.
+        """
+        try:
+            signature_provider = get_signature_provider(
+                sp_cfg=config.get("signProvider"),
+                local_file_key=cls.get_root_private_key_file(config),
+                search_paths=search_paths,
+            )
+        except SPSDKError as exc:
+            logger.debug(f"A signature provider could not be created: {exc}")
+            return None
+        root_certificates = find_root_certificates(config)
+        public_keys = []
+        for root_crt_file in root_certificates:
+            try:
+                public_key = extract_public_key(
+                    root_crt_file, search_paths=search_paths
+                )
+                public_keys.append(public_key)
+            except SPSDKError:
+                continue
+        try:
+            idx = get_matching_key_id(public_keys, signature_provider)
+            return idx
+        except (SPSDKValueError, SPSDKUnsupportedOperation) as exc:
+            logger.debug(f"Main cert index could not be found: {exc}")
+            return None
+
+    @classmethod
+    def get_main_cert_index(
+        cls, config: Dict[str, Any], search_paths: Optional[List[str]] = None
+    ) -> int:
+        """Gets main certificate index from configuration.
+
+        :param config: Input standard configuration.
+        :param search_paths: List of paths where to search for the file, defaults to None
+        :return: Certificate index
+        :raises SPSDKError: If invalid configuration is provided.
+        :raises SPSDKError: If correct certificate could not be identified.
+        :raises SPSDKValueError: If certificate is not of correct type.
+        """
+        root_cert_id = config.get("mainRootCertId")
+        cert_chain_id = config.get("mainCertChainId")
+        if (
+            root_cert_id is not None
+            and cert_chain_id is not None
+            and root_cert_id != cert_chain_id
+        ):
+            raise SPSDKError(
+                "The mainRootCertId and mainCertChainId are specified and have different values."
+ ) + found_cert_id = cls.find_main_cert_index( + config=config, search_paths=search_paths + ) + if root_cert_id is None and cert_chain_id is None: + if found_cert_id is not None: + return found_cert_id + raise SPSDKError("Certificate could not be found") + # root_cert_id may be 0 which is falsy value, therefore 'or' cannot be used + cert_id = root_cert_id if root_cert_id is not None else cert_chain_id + try: + cert_id = int(cert_id) + except ValueError as exc: + raise SPSDKValueError( + f"A certificate index is not a number: {cert_id}" + ) from exc + if found_cert_id is not None and found_cert_id != cert_id: + logger.warning("Defined certificate does not match the private key.") + return cert_id + + +######################################################################################################################## +# Certificate Block Header Class +######################################################################################################################## +class CertBlockHeader(BaseClass): + """Certificate block header.""" + + FORMAT = "<4s2H6I" + SIZE = calcsize(FORMAT) + SIGNATURE = b"cert" + + def __init__( + self, version: str = "1.0", flags: int = 0, build_number: int = 0 + ) -> None: + """Constructor. + + :param version: Version of the certificate in format n.n + :param flags: Flags for the Certificate Header + :param build_number: of the certificate + :raises SPSDKError: When there is invalid version + """ + if not re.match(r"[0-9]+\.[0-9]+", version): # check format of the version: N.N + raise SPSDKError("Invalid version") + self.version = version + self.flags = flags + self.build_number = build_number + self.image_length = 0 + self.cert_count = 0 + self.cert_table_length = 0 + + def __repr__(self) -> str: + nfo = f"CertBlockHeader: V={self.version}, F={self.flags}, BN={self.build_number}, IL={self.image_length}, " + nfo += f"CC={self.cert_count}, CTL={self.cert_table_length}" + return nfo + + def __str__(self) -> str: + """Info of the certificate header in text form.""" + nfo = str() + nfo += f" CB Version: {self.version}\n" + nfo += f" CB Flags: {self.flags}\n" + nfo += f" CB Build Number: {self.build_number}\n" + nfo += f" CB Image Length: {self.image_length}\n" + nfo += f" CB Cert. Count: {self.cert_count}\n" + nfo += f" CB Cert. Length: {self.cert_table_length}\n" + return nfo + + def export(self) -> bytes: + """Certificate block in binary form.""" + major_version, minor_version = [int(v) for v in self.version.split(".")] + return pack( + self.FORMAT, + self.SIGNATURE, + major_version, + minor_version, + self.SIZE, + self.flags, + self.build_number, + self.image_length, + self.cert_count, + self.cert_table_length, + ) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Deserialize object from bytes array. 
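The v1 certificate block header is a fixed 32-byte structure; a quick round-trip sketch against the code above (no external files needed):

```python
# "<4s2H6I" = 4-byte signature + 2 version halfwords + 6 32-bit fields = 32 bytes.
from struct import calcsize

from pynitrokey.trussed.bootloader.lpc55_upload.utils.crypto.cert_blocks import (
    CertBlockHeader,
)

header = CertBlockHeader(version="1.0", flags=0, build_number=1)
blob = header.export()

assert calcsize(CertBlockHeader.FORMAT) == 32
assert len(blob) == CertBlockHeader.SIZE == 32
assert blob.startswith(b"cert")
assert CertBlockHeader.parse(blob) == header  # equality via BaseClass
```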
+ + :param data: Input data as bytes + :return: Certificate Header instance + :raises SPSDKError: Unexpected size or signature of data + """ + if cls.SIZE > len(data): + raise SPSDKError("Incorrect size") + ( + signature, + major_version, + minor_version, + length, + flags, + build_number, + image_length, + cert_count, + cert_table_length, + ) = unpack_from(cls.FORMAT, data) + if signature != cls.SIGNATURE: + raise SPSDKError("Incorrect signature") + if length != cls.SIZE: + raise SPSDKError("Incorrect length") + obj = cls( + version=f"{major_version}.{minor_version}", + flags=flags, + build_number=build_number, + ) + obj.image_length = image_length + obj.cert_count = cert_count + obj.cert_table_length = cert_table_length + return obj + + +######################################################################################################################## +# Certificate Block Class +######################################################################################################################## +class CertBlockV1(CertBlock): + """Certificate block. + + Shared for SB file 2.1 and for MasterBootImage using RSA keys. + """ + + # default size alignment + DEFAULT_ALIGNMENT = 16 + + @property + def header(self) -> CertBlockHeader: + """Certificate block header.""" + return self._header + + @property + def rkh(self) -> List[bytes]: + """List of root keys hashes (SHA-256), each hash as 32 bytes.""" + return self._rkht.rkh_list + + @property + def rkth(self) -> bytes: + """Root Key Table Hash 32-byte hash (SHA-256) of SHA-256 hashes of up to four root public keys.""" + return self._rkht.rkth() + + @property + def rkth_fuses(self) -> List[int]: + """List of RKHT fuses, ordered from highest bit to lowest. + + Note: Returned values are in format that should be passed for blhost + """ + result = [] + rkht = self.rkth + while rkht: + fuse = int.from_bytes(rkht[:4], byteorder=Endianness.LITTLE.value) + result.append(fuse) + rkht = rkht[4:] + return result + + @property + def certificates(self) -> List[Certificate]: + """List of certificates in header. + + First certificate is root certificate and followed by optional chain certificates + """ + return self._cert + + @property + def signature_size(self) -> int: + """Size of the signature in bytes.""" + return len( + self.certificates[0].signature + ) # The certificate is self signed, return size of its signature + + @property + def rkh_index(self) -> Optional[int]: + """Index of the Root Key Hash that matches the certificate; None if does not match.""" + if self._cert: + rkh = self._cert[0].public_key_hash() + for index, value in enumerate(self.rkh): + if rkh == value: + return index + return None + + @property + def alignment(self) -> int: + """Alignment of the binary output, by default it is DEFAULT_ALIGNMENT but can be customized.""" + return self._alignment + + @alignment.setter + def alignment(self, value: int) -> None: + """Setter. 
+ + :param value: new alignment + :raises SPSDKError: When there is invalid alignment + """ + if value <= 0: + raise SPSDKError("Invalid alignment") + self._alignment = value + + @property + def raw_size(self) -> int: + """Aligned size of the certificate block.""" + size = CertBlockHeader.SIZE + size += self._header.cert_table_length + size += self._rkht.RKH_SIZE * self._rkht.RKHT_SIZE + return align(size, self.alignment) + + @property + def expected_size(self) -> int: + """Expected size of binary block.""" + return self.raw_size + + @property + def image_length(self) -> int: + """Image length in bytes.""" + return self._header.image_length + + @image_length.setter + def image_length(self, value: int) -> None: + """Setter. + + :param value: new image length + :raises SPSDKError: When there is invalid image length + """ + if value <= 0: + raise SPSDKError("Invalid image length") + self._header.image_length = value + + def __init__( + self, version: str = "1.0", flags: int = 0, build_number: int = 0 + ) -> None: + """Constructor. + + :param version: of the certificate in format n.n + :param flags: Flags for the Certificate Block Header + :param build_number: of the certificate + """ + self._header = CertBlockHeader(version, flags, build_number) + self._rkht: RKHTv1 = RKHTv1([]) + self._cert: List[Certificate] = [] + self._alignment = self.DEFAULT_ALIGNMENT + + def __len__(self) -> int: + return len(self._cert) + + def set_root_key_hash( + self, index: int, key_hash: Union[bytes, bytearray, Certificate] + ) -> None: + """Add Root Key Hash into RKHT. + + Note: Multiple root public keys are supported to allow for key revocation. + + :param index: The index of Root Key Hash in the table + :param key_hash: The Root Key Hash value (32 bytes, SHA-256); + or Certificate where the hash can be created from public key + :raises SPSDKError: When there is invalid index of root key hash in the table + :raises SPSDKError: When there is invalid length of key hash + """ + if isinstance(key_hash, Certificate): + key_hash = get_hash(key_hash.get_public_key().export()) + assert isinstance(key_hash, (bytes, bytearray)) + if len(key_hash) != self._rkht.RKH_SIZE: + raise SPSDKError("Invalid length of key hash") + self._rkht.set_rkh(index, bytes(key_hash)) + + def add_certificate(self, cert: Union[bytes, Certificate]) -> None: + """Add certificate. + + First call adds root certificate. Additional calls add chain certificates. + + :param cert: The certificate itself in DER format + :raises SPSDKError: If certificate cannot be added + """ + if isinstance(cert, bytes): + cert_obj = Certificate.parse(cert) + elif isinstance(cert, Certificate): + cert_obj = cert + else: + raise SPSDKError("Invalid parameter type (cert)") + if cert_obj.version.name != "v3": + raise SPSDKError( + "Expected certificate v3 but received: " + cert_obj.version.name + ) + if self._cert: # chain certificate? 
+ last_cert = self._cert[-1] # verify that it is signed by parent key + if not cert_obj.validate(last_cert): + raise SPSDKError( + "Chain certificate cannot be verified using parent public key" + ) + else: # root certificate + if not cert_obj.self_signed: + raise SPSDKError( + f"Root certificate must be self-signed.\n{str(cert_obj)}" + ) + self._cert.append(cert_obj) + self._header.cert_count += 1 + self._header.cert_table_length += cert_obj.raw_size + 4 + + def __repr__(self) -> str: + return str(self._header) + + def __str__(self) -> str: + """Text info about certificate block.""" + nfo = str(self.header) + nfo += " Public Root Keys Hash e.g. RKH (SHA256):\n" + rkh_index = self.rkh_index + for index, root_key in enumerate(self._rkht.rkh_list): + nfo += f" {index}) {root_key.hex().upper()} {'<- Used' if index == rkh_index else ''}\n" + rkth = self.rkth + nfo += f" RKTH (SHA256): {rkth.hex().upper()}\n" + for index, fuse in enumerate(self.rkth_fuses): + bit_ofs = (len(rkth) - 4 * index) * 8 + nfo += f" - RKTH fuse [{bit_ofs:03}:{bit_ofs - 31:03}]: {fuse:08X}\n" + for index, cert in enumerate(self._cert): + nfo += " Root Certificate:\n" if index == 0 else f" Certificate {index}:\n" + nfo += str(cert) + return nfo + + def verify_data(self, signature: bytes, data: bytes) -> bool: + """Signature verification. + + :param signature: to be verified + :param data: that has been signed + :return: True if the data signature can be confirmed using the certificate; False otherwise + """ + cert = self._cert[-1] + pub_key = cert.get_public_key() + return pub_key.verify_signature(signature=signature, data=data) + + def verify_private_key(self, private_key: PrivateKeyRsa) -> bool: + """Verify that given private key matches the public certificate. + + :param private_key: to be tested + :return: True if yes; False otherwise + """ + cert = self.certificates[-1] # last certificate + pub_key = cert.get_public_key() + return private_key.verify_public_key(pub_key) + + def export(self) -> bytes: + """Serialize Certificate Block V1 object.""" + # At least one certificate must be used + if not self._cert: + raise SPSDKError("At least one certificate must be used") + # The hast of root key certificate must be in RKHT + if self.rkh_index is None: + raise SPSDKError("The HASH of used Root Key must be in RKHT") + # CA: Using a single certificate is allowed. In this case, the sole certificate must be self-signed and must not + # be a CA. If multiple certificates are used, the root must be self-signed and all but the last must be CAs. + if self._cert[-1].ca: + raise SPSDKError("The last chain certificate must not be CA.") + if not all(cert.ca for cert in self._cert[:-1]): + raise SPSDKError( + "All certificates except the last chain certificate must be CA" + ) + # Export + data = self.header.export() + for cert in self._cert: + data += pack(" Self: + """Deserialize CertBlockV1 from binary file. + + :param data: Binary data + :return: Certificate Block instance + :raises SPSDKError: Length of the data doesn't match Certificate Block length + """ + header = CertBlockHeader.parse(data) + offset = CertBlockHeader.SIZE + if len(data) < ( + header.cert_table_length + (RKHTv1.RKHT_SIZE * RKHTv1.RKH_SIZE) + ): + raise SPSDKError( + "Length of the data doesn't match Certificate Block length" + ) + obj = cls( + version=header.version, flags=header.flags, build_number=header.build_number + ) + for _ in range(header.cert_count): + cert_len = unpack_from(" List[Dict[str, Any]]: + """Create the list of validation schemas. 
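A sketch of driving the `CertBlockV1` API directly; the certificate path is a placeholder, and the root certificate is assumed to be a self-signed, non-CA X.509 v3 certificate, which is what `add_certificate()`/`export()` expect:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.crypto.certificate import Certificate
from pynitrokey.trussed.bootloader.lpc55_upload.utils.crypto.cert_blocks import (
    CertBlockV1,
)

root_cert = Certificate.load("root0_cert.der")  # placeholder path

cert_block = CertBlockV1(build_number=1)
cert_block.add_certificate(root_cert)       # first call adds the (self-signed) root
cert_block.set_root_key_hash(0, root_cert)  # slot 0 <- hash of the root public key

print(cert_block.rkh_index)                 # RKH slot matching the root cert, if any
print(cert_block.rkth.hex())                # RKTH: hash over the RKH slots
print(cert_block.rkth_fuses)                # same value split into fuse words for blhost
data = cert_block.export()                  # header + certificates + RKH table, aligned
```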
+ + :return: List of validation schemas. + """ + sch_cfg = get_schema_file(DatabaseManager.CERT_BLOCK) + return [ + sch_cfg["certificate_v1"], + sch_cfg["certificate_root_keys"], + ] + + @staticmethod + def generate_config_template(_family: Optional[str] = None) -> str: + """Generate configuration for certification block v1.""" + val_schemas = CertBlockV1.get_validation_schemas() + val_schemas.append( + DatabaseManager().db.get_schema_file(DatabaseManager.CERT_BLOCK)[ + "cert_block_output" + ] + ) + return CommentedConfig( + "Certification Block V1 template", val_schemas + ).get_template() + + def create_config(self, data_path: str) -> str: + """Create configuration of the Certification block Image.""" + cfg = self.get_config(data_path) + val_schemas = CertBlockV1.get_validation_schemas() + + return CommentedConfig( + main_title=( + "Certification block v1 recreated configuration from :" + f"{datetime.datetime.now().strftime('%d/%m/%Y %H:%M:%S')}." + ), + schemas=val_schemas, + ).get_config(cfg) + + @classmethod + def get_root_private_key_file(cls, config: Dict[str, Any]) -> Optional[str]: + """Get main root private key file from config. + + :param config: Configuration to be searched. + :return: Root private key file path. + """ + private_key_file = config.get("mainCertPrivateKeyFile") + if private_key_file and not isinstance(private_key_file, str): + raise SPSDKTypeError("Root private key file must be a string type") + return private_key_file + + @classmethod + def from_config( + cls, + config: Dict[str, Any], + search_paths: Optional[List[str]] = None, + ) -> "CertBlockV1": + """Creates an instance of CertBlockV1 from configuration. + + :param config: Input standard configuration. + :param search_paths: List of paths where to search for the file, defaults to None + :return: Instance of CertBlockV1 + :raises SPSDKError: Invalid certificates detected, Invalid configuration. + """ + if not isinstance(config, Dict): + raise SPSDKError("Configuration cannot be parsed") + cert_block = config.get("certBlock") + if cert_block: + try: + return cls.parse(load_binary(cert_block, search_paths)) + except (SPSDKError, TypeError): + if search_paths: # append path to cert block + search_paths.append(os.path.dirname(cert_block)) + else: + search_paths = [os.path.dirname(cert_block)] + return cls.from_config( + load_configuration(cert_block, search_paths), search_paths + ) + + image_build_number = value_to_int(config.get("imageBuildNumber", 0)) + root_certificates: List[List[str]] = [[] for _ in range(4)] + # TODO we need to read the whole chain from the dict for a given + # selection based on mainCertPrivateKeyFile!!! 
+ root_certificates[0].append(config.get("rootCertificate0File", None)) + root_certificates[1].append(config.get("rootCertificate1File", None)) + root_certificates[2].append(config.get("rootCertificate2File", None)) + root_certificates[3].append(config.get("rootCertificate3File", None)) + main_cert_chain_id = cls.get_main_cert_index(config, search_paths=search_paths) + if root_certificates[main_cert_chain_id][0] is None: + raise SPSDKError( + f"A key rootCertificate{main_cert_chain_id}File must be defined" + ) + + # get all certificate chain related keys from config + pattern = f"chainCertificate{main_cert_chain_id}File[0-3]" + keys = [key for key in config.keys() if re.fullmatch(pattern, key)] + # just in case, sort the chain certificate keys in order + keys.sort() + for key in keys: + root_certificates[main_cert_chain_id].append(config[key]) + + cert_block = CertBlockV1(build_number=image_build_number) + + # add whole certificate chain used for image signing + for cert_path in root_certificates[main_cert_chain_id]: + cert_data = Certificate.load( + find_file(str(cert_path), search_paths=search_paths) + ).export(SPSDKEncoding.DER) + cert_block.add_certificate(cert_data) + # set root key hash of each root certificate + empty_rec = False + for cert_idx, cert_path_list in enumerate(root_certificates): + if cert_path_list[0]: + if empty_rec: + raise SPSDKError( + "There are gaps in rootCertificateXFile definition" + ) + cert_data = Certificate.load( + find_file(str(cert_path_list[0]), search_paths=search_paths) + ).export(SPSDKEncoding.DER) + cert_block.set_root_key_hash(cert_idx, Certificate.parse(cert_data)) + else: + empty_rec = True + + return cert_block + + def get_config(self, output_folder: str) -> Dict[str, Any]: + """Create configuration of Certificate V2 from object. + + :param output_folder: Output folder to store possible files. + :return: Configuration dictionary. 
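Roughly the configuration shape `CertBlockV1.from_config()` consumes, based on the keys read above; all file names and the search path are placeholders:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.utils.crypto.cert_blocks import (
    CertBlockV1,
)

config = {
    "imageBuildNumber": 1,
    "mainRootCertId": 0,                        # or derived from the private key
    "mainCertPrivateKeyFile": "root0_key.pem",  # used to find the matching root cert
    "rootCertificate0File": "root0_cert.der",
    "rootCertificate1File": "root1_cert.der",   # no gaps allowed between slots 0..3
    "chainCertificate0File0": "chain0_depth0.der",  # optional chain for root 0
}

cert_block = CertBlockV1.from_config(config, search_paths=["certs/"])
```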
+ """ + + def create_certificate_cfg(root_id: int, chain_id: int) -> Optional[str]: + if len(self._cert) <= chain_id: + return None + + file_name = f"certificate{root_id}_depth{chain_id}.der" + self._cert[chain_id].save(os.path.join(output_folder, file_name)) + return file_name + + cfg: Dict[str, Optional[Union[str, int]]] = {} + cfg["imageBuildNumber"] = self.header.build_number + used_cert_id = self.rkh_index + assert used_cert_id is not None + cfg["mainRootCertId"] = used_cert_id + + cfg[f"rootCertificate{used_cert_id}File"] = create_certificate_cfg( + used_cert_id, 0 + ) + for chain_ix in range(4): + cfg[ + f"chainCertificate{used_cert_id}File{chain_ix}" + ] = create_certificate_cfg(used_cert_id, chain_ix + 1) + + return cfg + + @classmethod + def get_supported_families(cls) -> List[str]: + """Get list of supported families.""" + return super()._get_supported_families("cert_block_1") + + +######################################################################################################################## +# Certificate Block Class for SB 3.1 +######################################################################################################################## + + +def convert_to_ecc_key(key: Union[PublicKeyEcc, bytes]) -> PublicKeyEcc: + """Convert key into EccKey instance.""" + if isinstance(key, PublicKeyEcc): + return key + try: + pub_key = extract_public_key_from_data(key) + if not isinstance(pub_key, PublicKeyEcc): + raise SPSDKError("Not ECC key") + return pub_key + except Exception: + pass + # Just recreate public key from the parsed data + return PublicKeyEcc.parse(key) + + +class CertificateBlockHeader(BaseClass): + """Create Certificate block header.""" + + FORMAT = "<4s2HL" + SIZE = calcsize(FORMAT) + MAGIC = b"chdr" + + def __init__(self, format_version: str = "2.1") -> None: + """Constructor for Certificate block header version 2.1. + + :param format_version: Major = 2, minor = 1 + """ + self.format_version = format_version + self.cert_block_size = 0 + + def export(self) -> bytes: + """Export Certificate block header as bytes array.""" + major_format_version, minor_format_version = [ + int(v) for v in self.format_version.split(".") + ] + + return pack( + self.FORMAT, + self.MAGIC, + minor_format_version, + major_format_version, + self.cert_block_size, + ) + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse Certificate block header from bytes array. 
+ + :param data: Input data as bytes + :raises SPSDKError: Raised when SIZE is bigger than length of the data without offset + :raises SPSDKError: Raised when magic is not equal MAGIC + :return: CertificateBlockHeader + """ + if cls.SIZE > len(data): + raise SPSDKError("SIZE is bigger than length of the data without offset") + ( + magic, + minor_format_version, + major_format_version, + cert_block_size, + ) = unpack_from(cls.FORMAT, data) + + if magic != cls.MAGIC: + raise SPSDKError("Magic is not same!") + + obj = cls(format_version=f"{major_format_version}.{minor_format_version}") + obj.cert_block_size = cert_block_size + return obj + + def __len__(self) -> int: + """Length of the Certificate block header.""" + return calcsize(self.FORMAT) + + def __repr__(self) -> str: + return f"Cert block header {self.format_version}" + + def __str__(self) -> str: + """Get info of Certificate block header.""" + info = f"Format version: {self.format_version}\n" + info += f"Certificate block size: {self.cert_block_size}\n" + return info + + +class RootKeyRecord(BaseClass): + """Create Root key record.""" + + # P-256 + + def __init__( + self, + ca_flag: bool, + root_certs: Optional[Union[Sequence[PublicKeyEcc], Sequence[bytes]]] = None, + used_root_cert: int = 0, + ) -> None: + """Constructor for Root key record. + + :param ca_flag: CA flag + :param root_certs: Root cert used to ISK/image signature + :param used_root_cert: Used root cert number 0-3 + """ + self.ca_flag = ca_flag + self.root_certs_input = root_certs + self.root_certs: List[PublicKeyEcc] = [] + self.used_root_cert = used_root_cert + self.flags = 0 + self._rkht = RKHTv21([]) + self.root_public_key = b"" + + @property + def number_of_certificates(self) -> int: + """Get number of included certificates.""" + return (self.flags & 0xF0) >> 4 + + @property + def expected_size(self) -> int: + """Get expected binary block size.""" + # the '4' means 4 bytes for flags + return 4 + len(self._rkht.export()) + len(self.root_public_key) + + def __repr__(self) -> str: + cert_type = {0x1: "secp256r1", 0x2: "secp384r1"}[self.flags & 0xF] + return f"Cert Block: Root Key Record - ({cert_type})" + + def __str__(self) -> str: + """Get info of Root key record.""" + cert_type = {0x1: "secp256r1", 0x2: "secp384r1"}[self.flags & 0xF] + info = "" + info += f"Flags: {hex(self.flags)}\n" + info += f" - CA: {bool(self.ca_flag)}, ISK Certificate is {'not ' if self.ca_flag else ''}mandatory\n" + info += f" - Used Root c.:{self.used_root_cert}\n" + info += f" - Number of c.:{self.number_of_certificates}\n" + info += f" - Cert. 
type: {cert_type}\n" + if self.root_certs: + info += f"Root certs: {self.root_certs}\n" + if self._rkht.rkh_list: + info += f"CTRK Hash table: {self._rkht.export().hex()}\n" + if self.root_public_key: + info += ( + f"Root public key: {str(convert_to_ecc_key(self.root_public_key))}\n" + ) + + return info + + def _calculate_flags(self) -> int: + """Function to calculate parameter flags.""" + flags = 0 + if self.ca_flag is True: + flags |= 1 << 31 + if self.used_root_cert: + flags |= self.used_root_cert << 8 + flags |= len(self.root_certs) << 4 + if self.root_certs[0].curve in ["NIST P-256", "p256", "secp256r1"]: + flags |= 1 << 0 + if self.root_certs[0].curve in ["NIST P-384", "p384", "secp384r1"]: + flags |= 1 << 1 + return flags + + def _create_root_public_key(self) -> bytes: + """Function to create root public key.""" + root_key = self.root_certs[self.used_root_cert] + root_key_data = root_key.export() + return root_key_data + + def calculate(self) -> None: + """Calculate all internal members. + + :raises SPSDKError: The RKHT certificates inputs are missing. + """ + # pylint: disable=invalid-name + if not self.root_certs_input: + raise SPSDKError( + "Root Key Record: The root of trust certificates are not specified." + ) + self.root_certs = [convert_to_ecc_key(cert) for cert in self.root_certs_input] + self.flags = self._calculate_flags() + self._rkht = RKHTv21.from_keys(keys=self.root_certs) + if self._rkht.hash_algorithm != self.get_hash_algorithm(self.flags): + raise SPSDKError("Hash algorithm does not match the key size.") + self.root_public_key = self._create_root_public_key() + + def export(self) -> bytes: + """Export Root key record as bytes array.""" + data = bytes() + data += pack(" EnumHashAlgorithm: + """Get CTRK table hash algorithm. + + :param flags: Root Key Record flags + :return: Name of hash algorithm + """ + return {1: EnumHashAlgorithm.SHA256, 2: EnumHashAlgorithm.SHA384}[flags & 0xF] + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse Root key record from bytes array. + + :param data: Input data as bytes array + :return: Root key record object + """ + (flags,) = unpack_from("> 8 + number_of_hashes = (flags & 0xF0) >> 4 + rotkh_len = {0x0: 32, 0x1: 32, 0x2: 48}[flags & 0xF] + root_key_record = cls( + ca_flag=ca_flag, root_certs=[], used_root_cert=used_rot_ix + ) + root_key_record.flags = flags + offset = 4 # move offset just after FLAGS + if number_of_hashes > 1: + rkht_len = rotkh_len * number_of_hashes + rkht = data[offset : offset + rkht_len] + offset += rkht_len + root_key_record.root_public_key = data[offset : offset + rotkh_len * 2] + root_key_record._rkht = ( + RKHTv21.parse(rkht, cls.get_hash_algorithm(flags)) + if number_of_hashes > 1 + else RKHTv21( + [ + get_hash( + root_key_record.root_public_key, cls.get_hash_algorithm(flags) + ) + ] + ) + ) + return root_key_record + + +class IskCertificate(BaseClass): + """Create ISK certificate.""" + + def __init__( + self, + constraints: int = 0, + signature_provider: Optional[SignatureProvider] = None, + isk_cert: Optional[Union[PublicKeyEcc, bytes]] = None, + user_data: Optional[bytes] = None, + offset_present: bool = True, + family: Optional[str] = None, + ) -> None: + """Constructor for ISK certificate. 
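The flags word packs the CA bit, the used-root-cert index, the certificate count and the curve; a small standalone decoder (my own helper, not spsdk API) that follows the same bit layout as `_calculate_flags()` above:

```python
def decode_rkr_flags(flags: int) -> dict:
    """Decode a RootKeyRecord flags word: bit 31 = CA, bits 8-11 = used root cert,
    bits 4-7 = number of certificates, bits 0-3 = curve selector."""
    curve = {0x1: "secp256r1", 0x2: "secp384r1"}[flags & 0xF]
    return {
        "ca": bool(flags & (1 << 31)),
        "used_root_cert": (flags & 0xF00) >> 8,
        "number_of_certificates": (flags & 0xF0) >> 4,
        "curve": curve,
    }


# CA flag set, root cert #1 in use, 4 certificates, P-256:
assert decode_rkr_flags(0x80000141) == {
    "ca": True,
    "used_root_cert": 1,
    "number_of_certificates": 4,
    "curve": "secp256r1",
}
```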
+ + :param constraints: Certificate version + :param signature_provider: ISK Signature Provider + :param isk_cert: ISK certificate + :param user_data: User data + """ + self.flags = 0 + self.offset_present = offset_present + self.constraints = constraints + self.signature_provider = signature_provider + self.isk_cert = convert_to_ecc_key(isk_cert) if isk_cert else None + self.user_data = user_data or bytes() + if family: + db = get_db(device=family) + isk_data_limit = db.get_int(DatabaseManager.CERT_BLOCK, "isk_data_limit") + if len(self.user_data) > isk_data_limit: + raise SPSDKError( + f"ISK user data is too big ({len(self.user_data)} B). Max size is: {isk_data_limit} B." + ) + isk_data_alignment = db.get_int( + DatabaseManager.CERT_BLOCK, "isk_data_alignment" + ) + if len(self.user_data) % isk_data_alignment: + raise SPSDKError( + f"ISK user data is not aligned to {isk_data_alignment} B." + ) + self.signature = bytes() + self.coordinate_length = ( + self.signature_provider.signature_length // 2 + if self.signature_provider + else 0 + ) + self.isk_public_key_data = self.isk_cert.export() if self.isk_cert else bytes() + + self._calculate_flags() + + @property + def signature_offset(self) -> int: + """Signature offset inside the ISK Certificate.""" + offset = calcsize("<3L") if self.offset_present else calcsize("<2L") + signature_offset = offset + len(self.user_data) + if self.isk_cert: + signature_offset += 2 * self.isk_cert.coordinate_size + + return signature_offset + + @property + def expected_size(self) -> int: + """Binary block expected size.""" + sign_len = len(self.signature) or ( + self.signature_provider.signature_length if self.signature_provider else 0 + ) + pub_key_len = ( + self.isk_cert.coordinate_size * 2 + if self.isk_cert + else len(self.isk_public_key_data) + ) + + offset = 4 if self.offset_present else 0 + return ( + offset # signature offset + + 4 # constraints + + 4 # flags + + pub_key_len # isk public key coordinates + + len(self.user_data) # user data + + sign_len # isk blob signature + ) + + def __repr__(self) -> str: + isk_type = {0: "secp256r1", 1: "secp256r1", 2: "secp384r1"}[self.flags & 0xF] + return f"ISK Certificate, {isk_type}" + + def __str__(self) -> str: + """Get info about ISK certificate.""" + isk_type = {0: "secp256r1", 1: "secp256r1", 2: "secp384r1"}[self.flags & 0xF] + info = "" + info += f"Constraints: {self.constraints}\n" + info += f"Flags: {self.flags}\n" + if self.user_data: + info += f"User data: {self.user_data.hex()}\n" + else: + info += "User data: Not included\n" + info += f"Type: {isk_type}\n" + info += f"Public Key: {str(self.isk_cert)}\n" + return info + + def _calculate_flags(self) -> None: + """Function to calculate parameter flags.""" + self.flags = 0 + if self.user_data: + self.flags |= 1 << 31 + assert self.isk_cert + if self.isk_cert.curve == "secp256r1": + self.flags |= 1 << 0 + if self.isk_cert.curve == "secp384r1": + self.flags |= 1 << 1 + + def create_isk_signature(self, key_record_data: bytes, force: bool = False) -> None: + """Function to create ISK signature. + + :raises SPSDKError: Signature provider is not specified. + """ + # pylint: disable=invalid-name + if self.signature and not force: + return + if not self.signature_provider: + raise SPSDKError( + "ISK Certificate: The signature provider is not specified." 
+ ) + if self.offset_present: + data = key_record_data + pack( + "<3L", self.signature_offset, self.constraints, self.flags + ) + else: + data = key_record_data + pack("<2L", self.constraints, self.flags) + data += self.isk_public_key_data + self.user_data + self.signature = self.signature_provider.get_signature(data) + + def export(self) -> bytes: + """Export ISK certificate as bytes array.""" + if not self.signature: + raise SPSDKError("Signature is not set.") + if self.offset_present: + data = pack("<3L", self.signature_offset, self.constraints, self.flags) + else: + data = pack("<2L", self.constraints, self.flags) + data += self.isk_public_key_data + if self.user_data: + data += self.user_data + data += self.signature + + assert len(data) == self.expected_size + return data + + @classmethod + def parse(cls, data: bytes, signature_size: int) -> Self: # type: ignore # pylint: disable=arguments-differ + """Parse ISK certificate from bytes array.This operation is not supported. + + :param data: Input data as bytes array + :param signature_size: The signature size of ISK block + :raises NotImplementedError: This operation is not supported + """ + (signature_offset, constraints, isk_flags) = unpack_from("<3L", data) + header_word_cnt = 3 + if ( + signature_offset & 0xFFFF == 0x4D43 + ): # This means that certificate has no offset + (constraints, isk_flags) = unpack_from("<2L", data) + signature_offset = 72 + header_word_cnt = 2 + user_data_flag = bool(isk_flags & 0x80000000) + isk_pub_key_length = {0x0: 32, 0x1: 32, 0x2: 48}[isk_flags & 0xF] + offset = header_word_cnt * 4 + isk_pub_key_bytes = data[offset : offset + isk_pub_key_length * 2] + offset += isk_pub_key_length * 2 + user_data = data[offset:signature_offset] if user_data_flag else None + signature = data[signature_offset : signature_offset + signature_size] + offset_present = header_word_cnt == 3 + certificate = cls( + constraints=constraints, + isk_cert=isk_pub_key_bytes, + user_data=user_data, + offset_present=offset_present, + ) + certificate.signature = signature + return certificate + + +class IskCertificateLite(BaseClass): + """ISK certificate lite.""" + + MAGIC = 0x4D43 + VERSION = 1 + HEADER_FORMAT = " None: + """Constructor for ISK certificate. + + :param pub_key: ISK public key + :param constraints: 1 = self signed, 0 = nxp signed + :param user_data: User data + """ + self.constraints = constraints + self.pub_key = convert_to_ecc_key(pub_key) + self.signature = bytes() + self.isk_public_key_data = self.pub_key.export() + + @property + def expected_size(self) -> int: + """Binary block expected size.""" + return ( + +4 # magic + version + + 4 # constraints + + self.ISK_PUB_KEY_LENGTH # isk public key coordinates + + self.ISK_SIGNATURE_SIZE # isk blob signature + ) + + def __repr__(self) -> str: + return "ISK Certificate lite" + + def __str__(self) -> str: + """Get info about ISK certificate.""" + info = "ISK Certificate lite\n" + info += f"Constraints: {self.constraints}\n" + info += f"Public Key: {str(self.pub_key)}\n" + return info + + def create_isk_signature( + self, signature_provider: Optional[SignatureProvider], force: bool = False + ) -> None: + """Function to create ISK signature. + + :param signature_provider: Signature Provider + :param force: Force resign. + :raises SPSDKError: Signature provider is not specified. + """ + # pylint: disable=invalid-name + if self.signature and not force: + return + if not signature_provider: + raise SPSDKError( + "ISK Certificate: The signature provider is not specified." 
+ ) + + data = pack(self.HEADER_FORMAT, self.MAGIC, self.VERSION, self.constraints) + data += self.isk_public_key_data + self.signature = signature_provider.get_signature(data) + + def export(self) -> bytes: + """Export ISK certificate as bytes array.""" + if not self.signature: + raise SPSDKError("Signature is not set.") + + data = pack(self.HEADER_FORMAT, self.MAGIC, self.VERSION, self.constraints) + data += self.isk_public_key_data + data += self.signature + + assert len(data) == self.expected_size, "ISK Cert data size does not match" + return data + + @classmethod + def parse(cls, data: bytes) -> Self: # pylint: disable=arguments-differ + """Parse ISK certificate from bytes array. + + :param data: Input data as bytes array + :raises NotImplementedError: This operation is not supported + """ + (_, _, constraints) = unpack_from(cls.HEADER_FORMAT, data) + offset = calcsize(cls.HEADER_FORMAT) + isk_pub_key_bytes = data[offset : offset + cls.ISK_PUB_KEY_LENGTH] + offset += cls.ISK_PUB_KEY_LENGTH + signature = data[offset : offset + cls.ISK_SIGNATURE_SIZE] + certificate = cls( + constraints=constraints, + pub_key=isk_pub_key_bytes, + ) + certificate.signature = signature + return certificate + + +class CertBlockV21(CertBlock): + """Create Certificate block version 2.1. + + Used for SB 3.1 and MBI using ECC keys. + """ + + MAGIC = b"chdr" + FORMAT_VERSION = "2.1" + + def __init__( + self, + root_certs: Optional[Union[Sequence[PublicKeyEcc], Sequence[bytes]]] = None, + ca_flag: bool = False, + version: str = "2.1", + used_root_cert: int = 0, + constraints: int = 0, + signature_provider: Optional[SignatureProvider] = None, + isk_cert: Optional[Union[PublicKeyEcc, bytes]] = None, + user_data: Optional[bytes] = None, + family: Optional[str] = None, + ) -> None: + """The Constructor for Certificate block.""" + self.header = CertificateBlockHeader(version) + self.root_key_record = RootKeyRecord( + ca_flag=ca_flag, used_root_cert=used_root_cert, root_certs=root_certs + ) + + self.isk_certificate = None + if not ca_flag and signature_provider and isk_cert: + self.isk_certificate = IskCertificate( + constraints=constraints, + signature_provider=signature_provider, + isk_cert=isk_cert, + user_data=user_data, + family=family, + ) + + def _set_ca_flag(self, value: bool) -> None: + self.root_key_record.ca_flag = value + + def calculate(self) -> None: + """Calculate all internal members.""" + self.root_key_record.calculate() + + @property + def signature_size(self) -> int: + """Size of the signature in bytes.""" + # signature size is same as public key data + if self.isk_certificate: + return len(self.isk_certificate.isk_public_key_data) + + return len(self.root_key_record.root_public_key) + + @property + def expected_size(self) -> int: + """Expected size of binary block.""" + expected_size = self.header.SIZE + expected_size += self.root_key_record.expected_size + if self.isk_certificate: + expected_size += self.isk_certificate.expected_size + return expected_size + + @property + def rkth(self) -> bytes: + """Root Key Table Hash 32-byte hash (SHA-256) of SHA-256 hashes of up to four root public keys.""" + return self.root_key_record._rkht.rkth() + + def __repr__(self) -> str: + return f"Cert block 2.1, Size:{self.expected_size}B" + + def __str__(self) -> str: + """Get info of Certificate block.""" + msg = f"HEADER:\n{str(self.header)}\n" + msg += f"ROOT KEY RECORD:\n{str(self.root_key_record)}\n" + if self.isk_certificate: + msg += f"ISK Certificate:\n{str(self.isk_certificate)}\n" + return msg + + def 
export(self) -> bytes: + """Export Certificate block as bytes array.""" + key_record_data = self.root_key_record.export() + self.header.cert_block_size = self.header.SIZE + len(key_record_data) + isk_cert_data = bytes() + if self.isk_certificate: + self.isk_certificate.create_isk_signature(key_record_data) + isk_cert_data = self.isk_certificate.export() + self.header.cert_block_size += len(isk_cert_data) + header_data = self.header.export() + return header_data + key_record_data + isk_cert_data + + @classmethod + def parse(cls, data: bytes) -> Self: + """Parse Certificate block from bytes array.This operation is not supported. + + :param data: Input data as bytes array + :raises SPSDKError: Magic do not match + """ + # CertificateBlockHeader + cert_header = CertificateBlockHeader.parse(data) + offset = len(cert_header) + # RootKeyRecord + root_key_record = RootKeyRecord.parse(data[offset:]) + offset += root_key_record.expected_size + # IskCertificate + isk_certificate = None + if root_key_record.ca_flag == 0: + isk_certificate = IskCertificate.parse( + data[offset:], len(root_key_record.root_public_key) + ) + # Certification Block V2.1 + cert_block = cls() + cert_block.header = cert_header + cert_block.root_key_record = root_key_record + cert_block.isk_certificate = isk_certificate + return cert_block + + @classmethod + def get_validation_schemas(cls) -> List[Dict[str, Any]]: + """Create the list of validation schemas. + + :return: List of validation schemas. + """ + sch_cfg = get_schema_file(DatabaseManager.CERT_BLOCK) + return [sch_cfg["certificate_v21"], sch_cfg["certificate_root_keys"]] + + @classmethod + def from_config( + cls, config: Dict[str, Any], search_paths: Optional[List[str]] = None + ) -> "CertBlockV21": + """Creates an instance of CertBlockV21 from configuration. + + :param config: Input standard configuration. + :param search_paths: List of paths where to search for the file, defaults to None + :return: Instance of CertBlockV21 + :raises SPSDKError: If found gap in certificates from config file. Invalid configuration. 
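A sketch of building a CA-flagged `CertBlockV21` programmatically; the key files are placeholders and are assumed to hold DER/PEM-encoded ECC public keys:

```python
from pathlib import Path

from pynitrokey.trussed.bootloader.lpc55_upload.utils.crypto.cert_blocks import (
    CertBlockV21,
)

# Placeholder key files; convert_to_ecc_key() turns the raw bytes into PublicKeyEcc.
root_keys = [Path(f"root{i}_pub.pem").read_bytes() for i in range(4)]

cert_block = CertBlockV21(root_certs=root_keys, ca_flag=True, used_root_cert=0)
cert_block.calculate()        # builds the root key record and the RKHT
blob = cert_block.export()    # header + root key record (no ISK cert when ca_flag=True)
print(cert_block.rkth.hex())  # hash of the root-key-hash table
```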
+ """ + if not isinstance(config, Dict): + raise SPSDKError("Configuration cannot be parsed") + cert_block = config.get("certBlock") + if cert_block: + try: + return cls.parse(load_binary(cert_block, search_paths)) + except (SPSDKError, TypeError): + if search_paths: # append path to cert block + search_paths.append(os.path.dirname(cert_block)) + else: + search_paths = [os.path.dirname(cert_block)] + cert_block_data = load_configuration(cert_block, search_paths) + # temporarily pass-down family to cert-block config data + cert_block_data["family"] = config["family"] + return cls.from_config(cert_block_data, search_paths) + + root_certificates = find_root_certificates(config) + main_root_cert_id = cls.get_main_cert_index(config, search_paths=search_paths) + + try: + root_certificates[main_root_cert_id] + except IndexError as e: + raise SPSDKError( + f"Main root certificate with id {main_root_cert_id} does not exist" + ) from e + + root_certs = [ + load_binary(cert_file, search_paths=search_paths) + for cert_file in root_certificates + ] + + user_data = None + signature_provider = None + isk_cert = None + + use_isk = config.get("useIsk", False) + if use_isk: + signature_provider_config = config.get("signProvider") + signature_provider = get_signature_provider( + signature_provider_config, + cls.get_root_private_key_file(config), + search_paths=search_paths, + ) + + isk_public_key = config.get( + "iskPublicKey", config.get("signingCertificateFile") + ) + isk_cert = load_binary(isk_public_key, search_paths=search_paths) + + isk_sign_data_path = config.get("iskCertData", config.get("signCertData")) + if isk_sign_data_path: + user_data = load_binary(isk_sign_data_path, search_paths=search_paths) + + isk_constraint = value_to_int( + config.get( + "iskCertificateConstraint", + config.get("signingCertificateConstraint", "0"), + ) + ) + family = config.get("family") + cert_block = cls( + root_certs=root_certs, + used_root_cert=main_root_cert_id, + user_data=user_data, + constraints=isk_constraint, + isk_cert=isk_cert, + ca_flag=not use_isk, + signature_provider=signature_provider, + family=family, + ) + cert_block.calculate() + + return cert_block + + def validate(self) -> None: + """Validate the settings of class members. + + :raises SPSDKError: Invalid configuration of certification block class members. + """ + self.header.parse(self.header.export()) + if self.isk_certificate and not self.isk_certificate.signature: + if not isinstance( + self.isk_certificate.signature_provider, SignatureProvider + ): + raise SPSDKError("Invalid ISK certificate.") + + @staticmethod + def generate_config_template(family: Optional[str] = None) -> str: + """Generate configuration for certification block v21.""" + val_schemas = CertBlockV21.get_validation_schemas() + val_schemas.append( + DatabaseManager().db.get_schema_file(DatabaseManager.CERT_BLOCK)[ + "cert_block_output" + ] + ) + + if family: + # find family + for schema in val_schemas: + if "properties" in schema and "family" in schema["properties"]: + schema["properties"]["family"]["template_value"] = family + break + return CommentedConfig( + "Certification Block V21 template", val_schemas + ).get_template() + + def get_config(self, output_folder: str) -> Dict[str, Any]: + """Create configuration dictionary of the Certification block Image. + + :param output_folder: Path to store the data files of configuration. + :return: Configuration dictionary. 
+ """ + cfg: Dict[str, Optional[Union[str, int]]] = {} + cfg["mainRootCertPrivateKeyFile"] = "N/A" + cfg["signingCertificatePrivateKeyFile"] = "N/A" + for i in range(self.root_key_record.number_of_certificates): + key: Optional[PublicKeyEcc] = None + if i == self.root_key_record.used_root_cert: + key = convert_to_ecc_key(self.root_key_record.root_public_key) + else: + if ( + i < len(self.root_key_record.root_certs) + and self.root_key_record.root_certs[i] + ): + key = convert_to_ecc_key(self.root_key_record.root_certs[i]) + if key: + key_file_name = os.path.join( + output_folder, f"rootCertificate{i}File.pub" + ) + key.save(key_file_name) + cfg[f"rootCertificate{i}File"] = f"rootCertificate{i}File.pub" + else: + cfg[ + f"rootCertificate{i}File" + ] = "The public key is not possible reconstruct from the key hash" + + cfg["mainRootCertId"] = self.root_key_record.used_root_cert + if self.isk_certificate and self.root_key_record.ca_flag == 0: + cfg["useIsk"] = True + assert self.isk_certificate.isk_cert + key = self.isk_certificate.isk_cert + key_file_name = os.path.join(output_folder, "signingCertificateFile.pub") + key.save(key_file_name) + cfg["signingCertificateFile"] = "signingCertificateFile.pub" + cfg["signingCertificateConstraint"] = self.isk_certificate.constraints + if self.isk_certificate.user_data: + key_file_name = os.path.join(output_folder, "isk_user_data.bin") + write_file(self.isk_certificate.user_data, key_file_name, mode="wb") + cfg["signCertData"] = "isk_user_data.bin" + + else: + cfg["useIsk"] = False + + return cfg + + def create_config(self, data_path: str) -> str: + """Create configuration of the Certification block Image. + + :param data_path: Path to store the data files of configuration. + :return: Configuration in string. + """ + cfg = self.get_config(data_path) + val_schemas = CertBlockV21.get_validation_schemas() + + return CommentedConfig( + main_title=( + "Certification block v2.1 recreated configuration from :" + f"{datetime.datetime.now().strftime('%d/%m/%Y %H:%M:%S')}." 
+ ), + schemas=val_schemas, + ).get_config(cfg) + + @classmethod + def get_supported_families(cls) -> List[str]: + """Get list of supported families.""" + return super()._get_supported_families("cert_block_21") + + +######################################################################################################################## +# Certificate Block Class for SB X +######################################################################################################################## + + +######################################################################################################################## +# Certificate Block Class for SB X +######################################################################################################################## + + +class CertBlockVx(CertBlock): + """Create Certificate block for MC56xx.""" + + ISK_CERT_LENGTH = 136 + ISK_CERT_HASH_LENGTH = 16 # [0:127] + + def __init__( + self, + isk_cert: Union[PublicKeyEcc, bytes], + signature_provider: Optional[SignatureProvider] = None, + self_signed: bool = True, + ) -> None: + """The Constructor for Certificate block.""" + self.isk_cert_hash = bytes(self.ISK_CERT_HASH_LENGTH) + self.isk_certificate = IskCertificateLite( + pub_key=isk_cert, constraints=int(self_signed) + ) + self.signature_provider = signature_provider + + @property + def expected_size(self) -> int: + """Expected size of binary block.""" + return self.isk_certificate.expected_size + + @property + def cert_hash(self) -> bytes: + """Calculate first half [:127] of certificate hash.""" + isk_cert_data = self.isk_certificate.export() + return get_hash(isk_cert_data)[: self.ISK_CERT_HASH_LENGTH] + + def __repr__(self) -> str: + return "CertificateBlockVx" + + def __str__(self) -> str: + """Get info about Certificate block.""" + msg = "Certificate block version x\n" + msg += f"ISK Certificate:\n{str(self.isk_certificate)}\n" + msg += f"Certificate hash: {self.cert_hash.hex()}" + return msg + + def export(self) -> bytes: + """Export Certificate block as bytes array.""" + isk_cert_data = bytes() + self.isk_certificate.create_isk_signature(self.signature_provider) + isk_cert_data = self.isk_certificate.export() + return isk_cert_data + + @classmethod + def parse(cls, data: bytes) -> "Self": + """Parse Certificate block from bytes array.This operation is not supported. + + :param data: Input data as bytes array + :raises SPSDKValueError: In case of inval + """ + # IskCertificate + isk_certificate = IskCertificateLite.parse(data) + cert_block = cls( + isk_cert=isk_certificate.isk_public_key_data, + self_signed=bool(isk_certificate.constraints), + ) + cert_block.isk_certificate.signature = isk_certificate.signature + return cert_block + + @classmethod + def get_validation_schemas(cls) -> List[Dict[str, Any]]: + """Create the list of validation schemas. + + :return: List of validation schemas. + """ + sch_cfg = get_schema_file(DatabaseManager.CERT_BLOCK) + return [sch_cfg["certificate_vx"]] + + def create_config(self, data_path: str) -> str: + """Create configuration of the Certification block Image.""" + raise SPSDKNotImplementedError("Parsing of Cert Block Vx is not supported") + + @classmethod + def from_config( + cls, config: Dict[str, Any], search_paths: Optional[List[str]] = None + ) -> "CertBlockVx": + """Creates an instance of CertBlockVx from configuration. + + :param config: Input standard configuration. 
+ :param search_paths: List of paths where to search for the file, defaults to None + :return: CertBlockVx + :raises SPSDKError: If found gap in certificates from config file. Invalid configuration. + """ + if not isinstance(config, Dict): + raise SPSDKError("Configuration cannot be parsed") + cert_block = config.get("certBlock") + if cert_block: + try: + return cls.parse(load_binary(cert_block, search_paths)) + except Exception: + return cls.from_config( + load_configuration(cert_block, search_paths), search_paths + ) + + main_root_private_key_file = cls.get_root_private_key_file(config) + signature_provider = config.get("signProvider", config.get("iskSignProvider")) + isk_certificate = config.get( + "iskPublicKey", config.get("signingCertificateFile") + ) + + signature_provider = get_signature_provider( + signature_provider, + main_root_private_key_file, + search_paths=search_paths, + ) + isk_cert = load_binary(isk_certificate, search_paths=search_paths) + self_signed = config.get("selfSigned", True) + cert_block = cls( + signature_provider=signature_provider, + isk_cert=isk_cert, + self_signed=self_signed, + ) + + return cert_block + + def validate(self) -> None: + """Validate the settings of class members. + + :raises SPSDKError: Invalid configuration of certification block class members. + """ + if self.isk_certificate and not self.isk_certificate.signature: + if not isinstance(self.signature_provider, SignatureProvider): + raise SPSDKError("Invalid ISK certificate.") + + @staticmethod + def generate_config_template(_family: Optional[str] = None) -> str: + """Generate configuration for certification block vX.""" + val_schemas = CertBlockVx.get_validation_schemas() + val_schemas.append( + DatabaseManager().db.get_schema_file(DatabaseManager.CERT_BLOCK)[ + "cert_block_output" + ] + ) + return CommentedConfig( + "Certification Block Vx template", val_schemas + ).get_template() + + @classmethod + def get_supported_families(cls) -> List[str]: + """Get list of supported families.""" + return super()._get_supported_families("cert_block_x") + + def get_otp_script(self) -> str: + """Return script for writing certificate hash to OTP. + + :return: string value of blhost script + """ + ret = ( + "# BLHOST Cert Block Vx fuses programming script\n" + f"# Generated by SPSDK {spsdk_version}\n" + f"# ISK Cert hash [0:127]: {self.cert_hash.hex()} \n\n" + ) + + fuse_value = change_endianness(self.cert_hash) + fuse_idx = 12 # Fuse start IDX + for fuse_data in split_data(fuse_value, 4): + ret += f"flash-program-once {hex(fuse_idx)} 4 {fuse_data.hex()}\n" + fuse_idx += 1 + + return ret + + +def find_root_certificates(config: Dict[str, Any]) -> List[str]: + """Find all root certificates in configuration. + + :param config: Configuration to be searched. + :raises SPSDKError: If invalid configuration is provided. + :return: List of root certificates. + """ + root_certificates_loaded: List[Optional[str]] = [ + config.get(f"rootCertificate{idx}File") for idx in range(4) + ] + # filter out None and empty values + root_certificates = list(filter(None, root_certificates_loaded)) + for org, filtered in zip(root_certificates_loaded, root_certificates): + if org != filtered: + raise SPSDKError("There are gaps in rootCertificateXFile definition") + return root_certificates + + +def get_keys_or_rotkh_from_certblock_config( + rot: Optional[str], family: Optional[str] +) -> Tuple[Optional[Iterable[str]], Optional[bytes]]: + """Get keys or ROTKH value from ROT config. 
+ + ROT config might be cert block config or MBI config. + There are four cases how cert block might be configured. + + 1. MBI with certBlock property pointing to YAML file + 2. MBI with certBlock property pointing to BIN file + 3. YAML configuration of cert block + 4. Binary cert block + + :param rot: Path to ROT configuration (MBI or cert block) + or path to binary cert block + :param family: MCU family + :raises SPSDKError: In case the ROTKH or keys cannot be parsed + :return: Tuple containing root of trust (list of paths to keys) + or ROTKH in case of binary cert block + """ + root_of_trust = None + rotkh = None + if rot and family: + logger.info("Loading configuration from cert block/MBI file...") + config_dir = os.path.dirname(rot) + try: + config_data = load_configuration(rot, search_paths=[config_dir]) + if "certBlock" in config_data: + try: + config_data = load_configuration( + config_data["certBlock"], search_paths=[config_dir] + ) + except SPSDKError: + cert_block = load_binary( + config_data["certBlock"], search_paths=[config_dir] + ) + parsed_cert_block = CertBlock.get_cert_block_class(family).parse( + cert_block + ) + rotkh = parsed_cert_block.rkth + public_keys = find_root_certificates(config_data) + root_of_trust = tuple( + (find_file(x, search_paths=[config_dir]) for x in public_keys) + ) + except SPSDKError: + logger.debug("Parsing ROT from config did not succeed, trying it as binary") + try: + cert_block = load_binary(rot, search_paths=[config_dir]) + parsed_cert_block = CertBlock.get_cert_block_class(family).parse( + cert_block + ) + rotkh = parsed_cert_block.rkth + except SPSDKError as e: + raise SPSDKError(f"Parsing of binary cert block failed with {e}") from e + + return root_of_trust, rotkh diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/otfad.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/otfad.py new file mode 100644 index 00000000..bb28a85d --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/otfad.py @@ -0,0 +1,339 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""The module provides support for On-The-Fly encoding for RTxxx devices.""" + +import logging +from struct import pack +from typing import Any, Dict, List, Optional, Union + +from crcmod.predefined import mkPredefinedCrcFun + +from ...crypto.rng import random_bytes +from ...crypto.symmetric import Counter, aes_ctr_encrypt, aes_key_wrap +from ...exceptions import SPSDKError, SPSDKValueError +from ...utils.misc import Endianness, align_block + +logger = logging.getLogger(__name__) + + +class KeyBlob: + """OTFAD KeyBlob: The class specifies AES key and counter initial value for specified address range. 
+ + | typedef struct KeyBlob + | { + | unsigned char key[kAesKeySizeBytes]; // 16 bytes, 128-bits, KEY[A15...A00] + | unsigned char ctr[kCtrSizeBytes]; // 8 bytes, 64-bits, CTR[C7...C0] + | unsigned int srtaddr; // region start, SRTADDR[31 - 10] + | unsigned int endaddr; // region end, ENDADDR[31 - 10]; lowest three bits are used as flags + | // end of 32-byte area covered by CRC + | unsigned int zero_fill; // zeros + | unsigned int key_blob_crc32; // crc32 over 1st 32-bytes + | // end of 40 byte (5*64-bit) key blob data + | unsigned char expanded_wrap_data[8]; // 8 bytes, used for wrap expanded data + | // end of 48 byte (6*64-bit) wrap data + | unsigned char unused_filler[16]; // unused fill to 64 bytes + | } keyblob_t; + """ + + _START_ADDR_MASK = 0x400 - 1 + # Region addresses are modulo 1024 + # The address ends with RO, ADE, VLD bits. From this perspective, only + # bits [9:3] must be set to 1. The rest is configurable. + _END_ADDR_MASK = 0x3F8 + + # Key flags mask: RO, ADE, VLD + _KEY_FLAG_MASK = 0x07 + # This field signals that the entire set of context registers (CTXn_KEY[0-3], CTXn_CTR[0-1], + # CTXn_RGD_W[0-1] are read-only and cannot be modified. This field is sticky and remains + # asserted until the next system reset. SR[RRAM] provides another level of register access + # control and is independent of the RO indicator. + KEY_FLAG_READ_ONLY = 0x4 + # AES Decryption Enable: For accesses hitting in a valid context, this bit indicates if the fetched data is to be + # decrypted or simply bypassed. + KEY_FLAG_ADE = 0x2 + # Valid: This field signals if the context is valid or not. + KEY_FLAG_VLD = 0x1 + + # key length in bytes + KEY_SIZE = 16 + # counter length in bytes + CTR_SIZE = 8 + # len of counter init value for export + _EXPORT_CTR_IV_SIZE = 8 + # this constant seems to be fixed for SB2.1 + _EXPORT_NBLOCKS_5 = 5 + # binary export size + _EXPORT_KEY_BLOB_SIZE = 64 + # QSPI image alignment length, 512 is supposed to be the safe alignment level for any QSPI device + # this means that all QSPI images generated by this tool will be sizes of multiple 512 + _IMAGE_ALIGNMENT = 512 + # Encryption block size + _ENCRYPTION_BLOCK_SIZE = 16 + + def __init__( + self, + start_addr: int, + end_addr: int, + key: Optional[bytes] = None, + counter_iv: Optional[bytes] = None, + key_flags: int = KEY_FLAG_VLD | KEY_FLAG_ADE, + # for testing + zero_fill: Optional[bytes] = None, + crc: Optional[bytes] = None, + ): + """Constructor. 
+ + :param start_addr: start address of the region + :param end_addr: end address of the region + :param key_flags: see KEY_FLAG_xxx constants; default flags: RO = 0, ADE = 1, VLD = 1 + :param key: optional AES key; None to use random value + :param counter_iv: optional counter init value for AES; None to use random value + :param binaries: optional data chunks of this key blob + :param zero_fill: optional value for zero_fill (for testing only); None to use random value (recommended) + :param crc: optional value for unused CRC fill (for testing only); None to use random value (recommended) + :raises SPSDKError: Start or end address are not aligned + :raises SPSDKError: When there is invalid key + :raises SPSDKError: When there is invalid start/end address + :raises SPSDKError: When key_flags exceeds mask + """ + if key is None: + key = random_bytes(self.KEY_SIZE) + if counter_iv is None: + counter_iv = random_bytes(self.CTR_SIZE) + if (len(key) != self.KEY_SIZE) and (len(counter_iv) != self.CTR_SIZE): + raise SPSDKError("Invalid key") + if start_addr < 0 or start_addr > end_addr or end_addr > 0xFFFFFFFF: + raise SPSDKError("Invalid start/end address") + if key_flags & ~self._KEY_FLAG_MASK != 0: + raise SPSDKError(f"key_flags exceeds mask {hex(self._KEY_FLAG_MASK)}") + if (start_addr & self._START_ADDR_MASK) != 0: + raise SPSDKError( + f"Start address must be aligned to {hex(self._START_ADDR_MASK + 1)} boundary" + ) + # if (end_addr & self._END_ADDR_MASK) != self._END_ADDR_MASK: + # raise SPSDKError(f"End address must be aligned to {hex(self._END_ADDR_MASK)} boundary") + self.key = key + self.ctr_init_vector = counter_iv + self.start_addr = start_addr + self.end_addr = end_addr + self.key_flags = key_flags + self.zero_fill = zero_fill + self.crc_fill = crc + + def __str__(self) -> str: + """Text info about the instance.""" + msg = "" + msg += f"Key: {self.key.hex()}\n" + msg += f"Counter IV: {self.ctr_init_vector.hex()}\n" + msg += f"Start Addr: {hex(self.start_addr)}\n" + msg += f"End Addr: {hex(self.end_addr)}\n" + return msg + + def plain_data(self) -> bytes: + """Plain data for selected key range. + + :return: key blob exported into binary form (serialization) + :raises SPSDKError: Invalid value of zero fill parameter + :raises SPSDKError: Invalid value crc + :raises SPSDKError: Invalid length binary data + """ + result = bytes() + result += self.key + result += self.ctr_init_vector + result += pack(" bytes: + """Creates key wrap for the key blob. 
+ + :param kek: key to encode; 16 bytes long + :param iv: counter initialization vector; 8 bytes; optional, OTFAD uses empty init value + :param byte_swap_cnt: Encrypted keyblob reverse byte count, 0 means NO reversing is enabled + :return: Serialized key blob + :raises SPSDKError: If any parameter is not valid + :raises SPSDKError: If length of kek is not valid + :raises SPSDKError: If length of data is not valid + """ + if isinstance(kek, str): + kek = bytes.fromhex(kek) + if len(kek) != 16: + raise SPSDKError("Invalid length of kek") + if len(iv) != self._EXPORT_CTR_IV_SIZE: + raise SPSDKError("Invalid length of initialization vector") + n = self._EXPORT_NBLOCKS_5 + plaintext = self.plain_data() # input data to be encrypted + if len(plaintext) < n * 8: + raise SPSDKError("Invalid length of data to be encrypted") + + blobs = bytes() + wrap = aes_key_wrap(kek, plaintext[:40]) + if byte_swap_cnt > 0: + for i in range(0, len(wrap), byte_swap_cnt): + blobs += wrap[i : i + byte_swap_cnt][::-1] + else: + blobs += wrap + + return align_block( + blobs, self._EXPORT_KEY_BLOB_SIZE, padding=0 + ) # align to 64 bytes (0 padding) + + def _get_ctr_nonce(self) -> bytes: + """Get the counter initial value for image encryption. + + :return: counter bytes + :raises SPSDKError: If length of counter is not valid + """ + # CTRn_x[127-0] = {CTR_W0_x[C0...C3], // 32 bits of pre-programmed CTR + # CTR_W1_x[C4...C7], // another 32 bits of CTR + # CTR_W0_x[C0...C3] ^ CTR_W1_x[C4...C7], // exclusive-OR of CTR values + # systemAddress[31-4], 0000b // 0-modulo-16 system address */ + + if len(self.ctr_init_vector) != 8: + raise SPSDKError("Invalid length of counter init") + + result = bytearray(16) + result[:4] = self.ctr_init_vector[:4] + result[4:8] = self.ctr_init_vector[4:] + for i in range(0, 4): + result[8 + i] = self.ctr_init_vector[0 + i] ^ self.ctr_init_vector[4 + i] + + # result[15:12] = start_addr as a counter; nonce has these bytes zero and value passes as counter init value + + return bytes(result) + + def contains_addr(self, addr: int) -> bool: + """Whether key blob contains specified address. + + :param addr: to be tested + :return: True if yes, False otherwise + """ + return self.start_addr <= addr <= self.end_addr + + def matches_range(self, image_start: int, image_end: int) -> bool: + """Whether key blob matches address range of the image to be encrypted. + + :param image_start: start address of the image + :param image_end: last address of the image + :return: True if yes, False otherwise + """ + return self.contains_addr(image_start) and self.contains_addr(image_end) + + def encrypt_image( + self, + base_address: int, + data: bytes, + byte_swap: bool, + counter_value: Optional[int] = None, + ) -> bytes: + """Encrypt specified data. + + :param base_address: of the data in target memory; must be >= self.start_addr + :param data: to be encrypted (e.g. 
plain image); base_address + len(data) must be <= self.end_addr + :param byte_swap: this probably depends on the flash device, how bytes are organized there + :param counter_value: Optional counter value, if not specified start address of keyblob will be used + :return: encrypted data + :raises SPSDKError: If start address is not valid + """ + if base_address % 16 != 0: + raise SPSDKError( + "Invalid start address" + ) # Start address has to be 16 byte aligned + data = align_block(data, self._ENCRYPTION_BLOCK_SIZE) # align data length + data_len = len(data) + + # check start and end addresses + # Support dual image boot, do not raise exception + if not self.matches_range(base_address, base_address + data_len - 1): + logger.warning( + f"Image address range is not within key blob: " + f"{hex(self.start_addr)}-{hex(self.end_addr)}." + " Ignore this if flash remap feature is used" + ) + result = bytes() + + if not counter_value: + counter_value = self.start_addr + + counter = Counter( + self._get_ctr_nonce(), + ctr_value=counter_value, + ctr_byteorder_encoding=Endianness.BIG, + ) + + for index in range(0, data_len, 16): + # prepare data in byte order + if byte_swap: + # swap 8 bytes + swap 8 bytes + data_2_encr = ( + data[-data_len + index + 7 : -data_len + index - 1 : -1] + + data[-data_len + index + 15 : -data_len + index + 7 : -1] + ) + else: + data_2_encr = data[index : index + 16] + # encrypt + encr_data = aes_ctr_encrypt(self.key, data_2_encr, counter.value) + # fix byte order in result + if byte_swap: + result += ( + encr_data[-9:-17:-1] + encr_data[-1:-9:-1] + ) # swap 8 bytes + swap 8 bytes + else: + result += encr_data + # update counter for encryption + counter.increment(16) + + if len(result) != data_len: + raise SPSDKError("Invalid length of encrypted data") + return bytes(result) + + @property + def is_encrypted(self) -> bool: + """Get the required encryption or not. + + :return: True if blob is encrypted, False otherwise. + """ + return (bool)( + (self.key_flags & (self.KEY_FLAG_ADE | self.KEY_FLAG_VLD)) + == (self.KEY_FLAG_ADE | self.KEY_FLAG_VLD) + ) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/rkht.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/rkht.py new file mode 100644 index 00000000..c27aa719 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/crypto/rkht.py @@ -0,0 +1,291 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2022-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""The module provides support for Root Key Hash table.""" + +import logging +import math +from abc import abstractmethod +from typing import List, Optional, Sequence, Union + +from typing_extensions import Self + +from ...crypto.certificate import Certificate +from ...crypto.hash import EnumHashAlgorithm, get_hash, get_hash_length +from ...crypto.keys import PrivateKey, PublicKey, PublicKeyEcc, PublicKeyRsa +from ...crypto.utils import extract_public_key, extract_public_key_from_data +from ...exceptions import SPSDKError +from ...utils.misc import Endianness + +logger = logging.getLogger(__name__) + + +class RKHT: + """Root Key Hash Table class.""" + + def __init__(self, rkh_list: List[bytes]) -> None: + """Initialization of Root Key Hash Table class. 
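# ---------------------------------------------------------------------------
# Editorial sketch (not part of the vendored code), referring to the KeyBlob
# class from otfad.py above: declare an encrypted flash region, encrypt an
# image that lives inside it, and export the AES-key-wrapped blob. Addresses,
# the KEK and the image bytes are placeholders; the region start must be
# 1 KiB aligned and the image base 16-byte aligned, as enforced above.
plain_image = bytes(1024)                        # placeholder image data
blob = KeyBlob(start_addr=0x0800_0000, end_addr=0x0800_FFFF)
encrypted = blob.encrypt_image(base_address=0x0800_1000, data=plain_image, byte_swap=False)
wrapped = blob.export(kek=bytes(16), iv=bytes(8))  # 16-byte KEK, 8-byte counter IV
# ---------------------------------------------------------------------------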
+ + :param rkh_list: List of Root Key Hashes + """ + if len(rkh_list) > 4: + raise SPSDKError("Number of Root Key Hashes can not be larger than 4.") + self.rkh_list = rkh_list + + @classmethod + def from_keys( + cls, + keys: Sequence[ + Union[str, bytes, bytearray, PublicKey, PrivateKey, Certificate] + ], + password: Optional[str] = None, + search_paths: Optional[List[str]] = None, + ) -> Self: + """Create RKHT from list of keys. + + :param keys: List of public keys/certificates/private keys/bytes + :param password: Optional password to open secured private keys, defaults to None + :param search_paths: List of paths where to search for the file, defaults to None + """ + public_keys = ( + [cls.convert_key(x, password, search_paths=search_paths) for x in keys] + if keys + else [] + ) + if not all(isinstance(x, type(public_keys[0])) for x in public_keys): + raise SPSDKError("RKHT must contains all keys of a same instances.") + if not all( + cls._get_hash_algorithm(x) == cls._get_hash_algorithm(public_keys[0]) + for x in public_keys + ): + raise SPSDKError("RKHT must have same hash algorithm for all keys.") + + rotk_hashes = [cls._calc_key_hash(key) for key in public_keys] + return cls(rotk_hashes) + + @abstractmethod + def rkth(self) -> bytes: + """Root Key Table Hash. + + :return: Hash of hashes of public keys. + """ + + @staticmethod + def _get_hash_algorithm(key: PublicKey) -> EnumHashAlgorithm: + """Get hash algorithm output size for the key. + + :param key: Key to get hash. + :raises SPSDKError: Invalid kye type. + :return: Size in bits of hash. + """ + if isinstance(key, PublicKeyEcc): + return EnumHashAlgorithm.from_label(f"sha{key.key_size}") + + if isinstance(key, PublicKeyRsa): + # In case of RSA keys, hash is always SHA-256, regardless of the key length + return EnumHashAlgorithm.SHA256 + + raise SPSDKError("Unsupported key type to load.") + + @property + def hash_algorithm(self) -> EnumHashAlgorithm: + """Used hash algorithm name.""" + if not len(self.rkh_list) > 0: + raise SPSDKError("Unknown hash algorighm name. No root key hashes.") + return EnumHashAlgorithm.from_label(f"sha{self.hash_algorithm_size}") + + @property + def hash_algorithm_size(self) -> int: + """Used hash algorithm size in bites.""" + if not len(self.rkh_list) > 0: + raise SPSDKError("Unknown hash algorithm size. No public keys provided.") + return len(self.rkh_list[0]) * 8 + + @staticmethod + def _calc_key_hash( + public_key: PublicKey, + algorithm: Optional[EnumHashAlgorithm] = None, + ) -> bytes: + """Calculate a hash out of public key's exponent and modulus in RSA case, X/Y in EC. + + :param public_key: List of public keys to compute hash from. + :param sha_width: Used hash algorithm. + :raises SPSDKError: Unsupported public key type + :return: Computed hash. 
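# ---------------------------------------------------------------------------
# Editorial sketch (not part of the vendored code): what RKHT._calc_key_hash()
# reduces to for a P-256 root key. The X and Y coordinates are concatenated
# big-endian and hashed (SHA-256 for 256-bit curves, SHA-384/512 for larger
# ones); RSA keys hash modulus || exponent with SHA-256 regardless of key
# length. hashlib is used here only to spell the rule out.
import hashlib

def p256_root_key_hash(x: int, y: int) -> bytes:
    coord_size = 256 // 8
    return hashlib.sha256(
        x.to_bytes(coord_size, "big") + y.to_bytes(coord_size, "big")
    ).digest()
# ---------------------------------------------------------------------------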
+ """ + n_1 = 0 + n_2 = 0 + if isinstance(public_key, PublicKeyRsa): + n_1 = public_key.e + n1_len = math.ceil(n_1.bit_length() / 8) + n_2 = public_key.n + n2_len = math.ceil(n_2.bit_length() / 8) + elif isinstance(public_key, PublicKeyEcc): + n_1 = public_key.y + n_2 = public_key.x + n1_len = n2_len = public_key.coordinate_size + else: + raise SPSDKError(f"Unsupported key type: {type(public_key)}") + + n1_bytes = n_1.to_bytes(n1_len, Endianness.BIG.value) + n2_bytes = n_2.to_bytes(n2_len, Endianness.BIG.value) + + algorithm = algorithm or RKHT._get_hash_algorithm(public_key) + return get_hash(n2_bytes + n1_bytes, algorithm=algorithm) + + @staticmethod + def convert_key( + key: Union[str, bytes, bytearray, PublicKey, PrivateKey, Certificate], + password: Optional[str] = None, + search_paths: Optional[List[str]] = None, + ) -> PublicKey: + """Convert practically whole input that could hold Public key into public key. + + :param key: Public key in Certificate/Private key, Public key as a path to file, + loaded bytes or supported class. + :param password: Optional password to open secured private keys, defaults to None. + :param search_paths: List of paths where to search for the file, defaults to None + :raises SPSDKError: Invalid kye type. + :return: Public Key object. + """ + if isinstance(key, PublicKey): + return key + + if isinstance(key, PrivateKey): + return key.get_public_key() + + if isinstance(key, Certificate): + return key.get_public_key() + + if isinstance(key, str): + return extract_public_key(key, password, search_paths=search_paths) + + if isinstance(key, (bytes, bytearray)): + return extract_public_key_from_data(key, password) + + raise SPSDKError("RKHT: Unsupported key to load.") + + +class RKHTv1(RKHT): + """Root Key Hash Table class for cert block v1.""" + + RKHT_SIZE = 4 + RKH_SIZE = 32 + + def __init__( + self, + rkh_list: List[bytes], + ) -> None: + """Initialization of Root Key Hash Table class. + + :param rkh_list: List of Root Key Hashes + """ + for key_hash in rkh_list: + if len(key_hash) != self.RKH_SIZE: + raise SPSDKError(f"Invalid key hash size: {len(key_hash)}") + super().__init__(rkh_list) + + @property + def hash_algorithm(self) -> EnumHashAlgorithm: + """Used Hash algorithm name.""" + return EnumHashAlgorithm.SHA256 + + def export(self) -> bytes: + """Export RKHT as bytes.""" + rotk_table = b"" + for i in range(self.RKHT_SIZE): + if i < len(self.rkh_list) and self.rkh_list[i]: + rotk_table += self.rkh_list[i] + else: + rotk_table += bytes(self.RKH_SIZE) + if len(rotk_table) != self.RKH_SIZE * self.RKHT_SIZE: + raise SPSDKError("Invalid length of data.") + return rotk_table + + @classmethod + def parse(cls, rkht: bytes) -> Self: + """Parse Root Key Hash Table into RKHTv1 object. + + :param rkht: Valid RKHT table + """ + rotkh_len = len(rkht) // cls.RKHT_SIZE + offset = 0 + key_hashes = [] + for _ in range(cls.RKHT_SIZE): + key_hashes.append(rkht[offset : offset + rotkh_len]) + offset += rotkh_len + return cls(key_hashes) + + def rkth(self) -> bytes: + """Root Key Table Hash. + + :return: Hash of Hashes of public key. + """ + rotkh = get_hash(self.export(), self.hash_algorithm) + return rotkh + + def set_rkh(self, index: int, rkh: bytes) -> None: + """Set Root Key Hash with index. 
+ + :param index: Index in the hash table + :param rkh: Root Key Hash to be set + """ + if index > 3: + raise SPSDKError("Key hash can not be larger than 3.") + if self.rkh_list and len(rkh) != len(self.rkh_list[0]): + raise SPSDKError("Root Key Hash must be the same size as other hashes.") + # fill the gap with zeros if the keys are not consecutive + for idx in range(index + 1): + if len(self.rkh_list) < idx + 1: + self.rkh_list.append(bytes(self.RKH_SIZE)) + assert len(self.rkh_list) <= 4 + self.rkh_list[index] = rkh + + +class RKHTv21(RKHT): + """Root Key Hash Table class for cert block v2.1.""" + + def export(self) -> bytes: + """Export RKHT as bytes.""" + hash_table = bytes() + if len(self.rkh_list) > 1: + hash_table = bytearray().join(self.rkh_list) + return hash_table + + @classmethod + def parse(cls, rkht: bytes, hash_algorithm: EnumHashAlgorithm) -> Self: + """Parse Root Key Hash Table into RKHTv21 object. + + :param rkht: Valid RKHT table + :param hash_algorithm: Hash algorithm to be used + """ + rkh_len = get_hash_length(hash_algorithm) + if len(rkht) % rkh_len != 0: + raise SPSDKError( + f"The length of Root Key Hash Table does not match the hash algorithm {hash_algorithm}" + ) + offset = 0 + rkh_list = [] + rkht_size = len(rkht) // rkh_len + for _ in range(rkht_size): + rkh_list.append(rkht[offset : offset + rkh_len]) + offset += rkh_len + return cls(rkh_list) + + def rkth(self) -> bytes: + """Root Key Table Hash. + + :return: Hash of Hashes of public key. + """ + if not self.rkh_list: + logger.debug("RKHT has no records.") + return bytes() + if len(self.rkh_list) == 1: + rotkh = self.rkh_list[0] + else: + rotkh = get_hash(self.export(), self.hash_algorithm) + return rotkh diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/database.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/database.py new file mode 100644 index 00000000..8e6c6978 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/database.py @@ -0,0 +1,866 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2022-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause +"""Module to manage used databases in SPSDK.""" + +import atexit +import logging +import os +import pickle +import shutil +from copy import copy, deepcopy +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union + +import platformdirs +from typing_extensions import Self + +from .. import SPSDK_CACHE_DISABLED, SPSDK_DATA_FOLDER +from ..crypto.hash import EnumHashAlgorithm, Hash, get_hash +from ..exceptions import SPSDKError, SPSDKValueError +from ..utils.misc import ( + deep_update, + find_first, + load_configuration, + value_to_bool, + value_to_int, +) + +logger = logging.getLogger(__name__) + + +class SPSDKErrorMissingDevice(SPSDKError): + """Missing device in database.""" + + +class Features: + """Features dataclass represents a single device revision.""" + + def __init__( + self, + name: str, + is_latest: bool, + device: "Device", + features: Dict[str, Dict[str, Any]], + ) -> None: + """Constructor of revision. + + :param name: Revision name + :param is_latest: Mark if this revision is latest. + :param device: Reference to its device + :param features: Features + """ + self.name = name + self.is_latest = is_latest + self.device = device + self.features = features + + def check_key(self, feature: str, key: Union[List[str], str]) -> bool: + """Check if the key exist in database. 
+ + :param feature: Feature name + :param key: Item key or key path in list like ['grp1', 'grp2', 'key'] + :raises SPSDKValueError: Unsupported feature + :return: True if exist False otherwise + """ + if feature not in self.features: + raise SPSDKValueError(f"Unsupported feature: '{feature}'") + db_dict = self.features[feature] + + if isinstance(key, list): + while len(key) > 1: + act_key = key.pop(0) + if act_key not in db_dict or not isinstance(db_dict[act_key], dict): + return False + db_dict = db_dict[act_key] + key = key[0] + + assert isinstance(key, str) + return key in db_dict + + def get_value( + self, feature: str, key: Union[List[str], str], default: Any = None + ) -> Any: + """Get value. + + :param feature: Feature name + :param key: Item key or key path in list like ['grp1', 'grp2', 'key'] + :param default: Default value in case of missing key + :raises SPSDKValueError: Unsupported feature + :raises SPSDKValueError: Unavailable item in feature + :return: Value from the feature + """ + if feature not in self.features: + raise SPSDKValueError(f"Unsupported feature: '{feature}'") + db_dict = self.features[feature] + + if isinstance(key, list): + while len(key) > 1: + act_key = key.pop(0) + if act_key not in db_dict or not isinstance(db_dict[act_key], dict): + raise SPSDKValueError(f"Non-existing nested group: '{act_key}'") + db_dict = db_dict[act_key] + key = key[0] + + assert isinstance(key, str) + val = db_dict.get(key, default) + + if val is None: + raise SPSDKValueError(f"Unavailable item '{key}' in feature '{feature}'") + return val + + def get_bool( + self, feature: str, key: Union[List[str], str], default: Optional[bool] = None + ) -> bool: + """Get Boolean value. + + :param feature: Feature name + :param key: Item key or key path in list like ['grp1', 'grp2', 'key'] + :param default: Default value in case of missing key + :return: Boolean value from the feature + """ + val = self.get_value(feature, key, default) + return value_to_bool(val) + + def get_int( + self, feature: str, key: Union[List[str], str], default: Optional[int] = None + ) -> int: + """Get Integer value. + + :param feature: Feature name + :param key: Item key or key path in list like ['grp1', 'grp2', 'key'] + :param default: Default value in case of missing key + :return: Integer value from the feature + """ + val = self.get_value(feature, key, default) + return value_to_int(val) + + def get_str( + self, feature: str, key: Union[List[str], str], default: Optional[str] = None + ) -> str: + """Get String value. + + :param feature: Feature name + :param key: Item key or key path in list like ['grp1', 'grp2', 'key'] + :param default: Default value in case of missing key + :return: String value from the feature + """ + val = self.get_value(feature, key, default) + assert isinstance(val, str) + return val + + def get_list( + self, feature: str, key: Union[List[str], str], default: Optional[List] = None + ) -> List[Any]: + """Get List value. + + :param feature: Feature name + :param key: Item key or key path in list like ['grp1', 'grp2', 'key'] + :param default: Default value in case of missing key + :return: List value from the feature + """ + val = self.get_value(feature, key, default) + assert isinstance(val, list) + return val + + def get_dict( + self, feature: str, key: Union[List[str], str], default: Optional[Dict] = None + ) -> Dict: + """Get Dictionary value. 
+ + :param feature: Feature name + :param key: Item key or key path in list like ['grp1', 'grp2', 'key'] + :param default: Default value in case of missing key + :return: Dictionary value from the feature + """ + val = self.get_value(feature, key, default) + assert isinstance(val, dict) + return val + + def get_file_path( + self, feature: str, key: Union[List[str], str], default: Optional[str] = None + ) -> str: + """Get File path value. + + :param feature: Feature name + :param key: Item key or key path in list like ['grp1', 'grp2', 'key'] + :param default: Default value in case of missing key + :return: File path value from the feature + """ + file_name = self.get_str(feature, key, default) + return self.device.create_file_path(file_name) + + +class Revisions(List[Features]): + """List of device revisions.""" + + def revision_names(self, append_latest: bool = False) -> List[str]: + """Get list of revisions. + + :param append_latest: Add to list also "latest" string + :return: List of all supported device version. + """ + ret = [rev.name for rev in self] + if append_latest: + ret.append("latest") + return ret + + def get(self, name: Optional[str] = None) -> Features: + """Get the revision by its name. + + If name is not specified, or equal to 'latest', then the latest revision is returned. + + :param name: The revision name. + :return: The Revision object. + """ + if name is None or name == "latest": + return self.get_latest() + return self.get_by_name(name) + + def get_by_name(self, name: str) -> Features: + """Get the required revision. + + :param name: Required revision name + :raises SPSDKValueError: Incase of invalid device or revision value. + :return: The Revision object. + """ + revision = find_first(self, lambda rev: rev.name == name) + if not revision: + raise SPSDKValueError(f"Requested revision {name} is not supported.") + return revision + + def get_latest(self) -> Features: + """Get latest revision for device. + + :raises SPSDKValueError: Incase of there is no latest revision defined. + :return: The Features object. + """ + revision = find_first(self, lambda rev: rev.is_latest) + if not revision: + raise SPSDKValueError("No latest revision has been defined.") + return revision + + +class DeviceInfo: + """Device information dataclass.""" + + def __init__( + self, + purpose: str, + web: str, + memory_map: Dict[str, Dict[str, Union[int, bool]]], + isp: Dict[str, Any], + ) -> None: + """Constructor of device information class. + + :param purpose: String description of purpose of MCU (in fact the device group) + :param web: Web page with device info + :param memory_map: Basic memory map of device + :param isp: Information regarding ISP mode + """ + self.purpose = purpose + self.web = web + self.memory_map = memory_map + self.isp = isp + + @staticmethod + def load(config: Dict[str, Any], defaults: Dict[str, Any]) -> "DeviceInfo": + """Loads the device from folder. + + :param config: The name of device. + :param defaults: Device data defaults. + :return: The Device object. + """ + data = deepcopy(defaults) + deep_update(data, config) + return DeviceInfo( + purpose=data["purpose"], + web=data["web"], + memory_map=data["memory_map"], + isp=data["isp"], + ) + + def update(self, config: Dict[str, Any]) -> None: + """Updates Device info by new configuration. 
+ + :param config: The new Device Info configuration + """ + self.purpose = config.get("purpose", self.purpose) + self.web = config.get("web", self.web) + self.memory_map = config.get("memory_map", self.memory_map) + self.isp = config.get("isp", self.isp) + + +class Device: + """Device dataclass represents a single device.""" + + def __init__( + self, + name: str, + path: str, + latest_rev: str, + info: DeviceInfo, + device_alias: Optional["Device"] = None, + revisions: Revisions = Revisions(), + ) -> None: + """Constructor of SPSDK Device. + + :param name: Device name + :param path: Data path + :param latest_rev: latest revision name + :param device_alias: Device alias, defaults to None + :param revisions: Device revisions, defaults to Revisions() + """ + self.name = name + self.path = path + self.latest_rev = latest_rev + self.device_alias = device_alias + self.revisions = revisions + self.info = info + + @property + def features_list(self) -> List[str]: + """Get the list of device features.""" + return [str(k) for k in self.revisions.get().features.keys()] + + @staticmethod + def _load_alias( + name: str, path: str, dev_cfg: Dict[str, Any], other_devices: "Devices" + ) -> "Device": + """Loads the device from folder. + + :param name: The name of device. + :param path: Device data path. + :param dev_cfg: Already loaded configuration. + :param other_devices: Other devices used to allow aliases. + :return: The Device object. + """ + dev_cfg = load_configuration(os.path.join(path, "database.yaml")) + dev_alias_name = dev_cfg["alias"] + # Let get() function raise exception in case that device not exists in database + ret = deepcopy(other_devices.get(dev_alias_name)) + ret.name = name + ret.path = path + ret.device_alias = other_devices.get(dev_alias_name) + dev_features: Dict[str, Dict] = dev_cfg.get("features", {}) + dev_revisions: Dict[str, Dict] = dev_cfg.get("revisions", {}) + assert isinstance(dev_features, Dict) + assert isinstance(dev_revisions, Dict) + ret.latest_rev = dev_cfg.get("latest", ret.latest_rev) + # First off all update general changes in features + if dev_features: + for rev in ret.revisions: + deep_update(rev.features, dev_features) + + for rev_name, rev_updates in dev_revisions.items(): + try: + dev_rev = ret.revisions.get_by_name(rev_name) + except SPSDKValueError as exc: + # In case of newly defined revision, there must be defined alias + alias_rev = rev_updates.get("alias") + if not alias_rev: + raise SPSDKError( + f"There is missing alias key in new revision ({rev_name}) of aliased device {ret.name}" + ) from exc + dev_rev = deepcopy(ret.revisions.get_by_name(alias_rev)) + dev_rev.name = rev_name + dev_rev.is_latest = bool(ret.latest_rev == rev_name) + ret.revisions.append(dev_rev) + + # Update just same rev + rev_specific_features = rev_updates.get("features") + if rev_specific_features: + deep_update(dev_rev.features, rev_specific_features) + + if "info" in dev_cfg: + ret.info.update(dev_cfg["info"]) + + return ret + + @staticmethod + def load( + name: str, path: str, defaults: Dict[str, Any], other_devices: "Devices" + ) -> "Device": + """Loads the device from folder. + + :param name: The name of device. + :param path: Device data path. + :param defaults: Device data defaults. + :param other_devices: Other devices used to allow aliases. + :return: The Device object. 
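# ---------------------------------------------------------------------------
# Editorial sketch (not part of the vendored code): shape of a per-device
# database.yaml as Device.load() / Device._load_alias() above read it, shown
# as the dict returned by load_configuration(). Names and the feature payload
# are illustrative; the top-level keys come from the code, and each feature
# group must also exist in common/database_defaults.yaml.
device_yaml = {
    "latest": "1b",
    "info": {"purpose": "...", "web": "...", "memory_map": {}, "isp": {}},
    "features": {"cert_block": {}},              # device-wide feature overrides
    "revisions": {
        "0a": {},
        "1b": {"features": {"cert_block": {}}},  # per-revision overrides
    },
    # An aliased device instead carries: "alias": "<other_device_name>"
}
# ---------------------------------------------------------------------------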
+ """ + dev_cfg = load_configuration(os.path.join(path, "database.yaml")) + dev_alias_name = dev_cfg.get("alias") + if dev_alias_name: + return Device._load_alias( + name=name, path=path, dev_cfg=dev_cfg, other_devices=other_devices + ) + + dev_features: Dict[str, Dict] = dev_cfg["features"] + features_defaults: Dict[str, Dict] = deepcopy(defaults["features"]) + + dev_info = DeviceInfo.load(dev_cfg["info"], defaults["info"]) + + # Get defaults and update them by device specific data set + for feature_name in dev_features: + deep_update(features_defaults[feature_name], dev_features[feature_name]) + dev_features[feature_name] = features_defaults[feature_name] + + revisions = Revisions() + dev_revisions: Dict[str, Dict] = dev_cfg["revisions"] + latest: str = dev_cfg["latest"] + if latest not in dev_revisions: + raise SPSDKError( + f"The latest revision defined in database for {name} is not in supported revisions" + ) + + ret = Device( + name=name, path=path, info=dev_info, latest_rev=latest, device_alias=None + ) + + for rev, rev_updates in dev_revisions.items(): + features = deepcopy(dev_features) + rev_specific_features = rev_updates.get("features") + if rev_specific_features: + deep_update(features, rev_specific_features) + revisions.append( + Features( + name=rev, + is_latest=bool(rev == latest), + features=features, + device=ret, + ) + ) + + ret.revisions = revisions + + return ret + + def create_file_path(self, file_name: str) -> str: + """Create File path value for this device. + + :param file_name: File name to be enriched by device path + :return: File path value for the device + """ + path = os.path.abspath(os.path.join(self.path, file_name)) + if not os.path.exists(path) and self.device_alias: + path = self.device_alias.create_file_path(file_name) + + if not os.path.exists(path): + raise SPSDKValueError(f"Non existing file ({file_name}) in database") + return path + + +class Devices(List[Device]): + """List of devices.""" + + def get(self, name: str) -> Device: + """Return database device structure. + + :param name: String Key with device name. + :raises SPSDKErrorMissingDevice: In case the device with given name does not exist + :return: Dictionary device configuration structure or None: + """ + dev = find_first(self, lambda dev: dev.name == name) + if not dev: + raise SPSDKErrorMissingDevice( + f"The device with name {name} is not in the database." + ) + return dev + + @property + def devices_names(self) -> List[str]: + """Get the list of devices names.""" + return [dev.name for dev in self] + + def feature_items(self, feature: str, key: str) -> Iterator[Tuple[str, str, Any]]: + """Iter the whole database for the feature items. + + :return: Tuple of Device name, revision name and items value. + """ + for device in self: + if not feature in device.features_list: + continue + for rev in device.revisions: + value = rev.features[feature].get(key) + if value is None: + raise SPSDKValueError( + f"Missing item '{key}' in feature '{feature}'!" + ) + yield (device.name, rev.name, value) + + @staticmethod + def load(devices_path: str, defaults: Dict[str, Any]) -> "Devices": + """Loads the devices from SPSDK database path. + + :param devices_path: Devices data path. + :param defaults: Devices defaults data. + :return: The Devices object. 
+ """ + devices = Devices() + uncompleted_aliases: List[os.DirEntry] = [] + for dev in os.scandir(devices_path): + if dev.is_dir(): + try: + try: + devices.append( + Device.load( + name=dev.name, + path=dev.path, + defaults=defaults, + other_devices=devices, + ) + ) + except SPSDKErrorMissingDevice: + uncompleted_aliases.append(dev) + except SPSDKError as exc: + logger.error( + f"Failed loading device '{dev.name}' into SPSDK database. Details:\n{str(exc)}" + ) + while uncompleted_aliases: + prev_len = len(uncompleted_aliases) + for dev in copy(uncompleted_aliases): + try: + devices.append( + Device.load( + name=dev.name, + path=dev.path, + defaults=defaults, + other_devices=devices, + ) + ) + uncompleted_aliases.remove(dev) + except SPSDKErrorMissingDevice: + pass + if prev_len == len(uncompleted_aliases): + raise SPSDKError("Cannot load all alias devices in database.") + return devices + + +class Database: + """Class that helps manage used databases in SPSDK.""" + + def __init__(self, path: str) -> None: + """Register Configuration class constructor. + + :param path: The path to configuration JSON file. + """ + self._cfg_cache: Dict[str, Dict[str, Any]] = {} + self.path = path + self.common_folder_path = os.path.join(path, "common") + self.devices_folder_path = os.path.join(path, "devices") + self._defaults = load_configuration( + os.path.join(self.common_folder_path, "database_defaults.yaml") + ) + self._devices = Devices.load( + devices_path=self.devices_folder_path, defaults=self._defaults + ) + + # optional Database hash that could be used for identification of consistency + self.db_hash = bytes() + + @property + def devices(self) -> Devices: + """Get the list of devices stored in the database.""" + return self._devices + + def get_feature_list(self, dev_name: Optional[str] = None) -> List[str]: + """Get features list. + + If device is not used, the whole list of SPSDK features is returned + + :param dev_name: Device name, defaults to None + :returns: List of features. + """ + if dev_name: + return self.devices.get(dev_name).features_list + + default_features: Dict[str, Dict] = self._defaults["features"] + return [str(k) for k in default_features.keys()] + + def get_defaults(self, feature: str) -> Dict[str, Any]: + """Gets feature defaults. + + :param feature: Feature name + :return: Dictionary with feature defaults. + """ + features = self._defaults["features"] + if feature not in features: + raise SPSDKValueError(f"Invalid feature requested: {feature}") + + return deepcopy(features[feature]) + + def get_device_features( + self, + device: str, + revision: str = "latest", + ) -> Features: + """Get device features database. + + :param device: The device name. + :param revision: The revision of the silicon. + :raises SPSDKValueError: Unsupported feature + :return: The feature data. + """ + dev = self.devices.get(device) + return dev.revisions.get(revision) + + def get_schema_file(self, feature: str) -> Dict[str, Any]: + """Get JSON Schema file name for the requested feature. + + :param feature: Requested feature. + :return: Loaded dictionary of JSON Schema file. + """ + filename = os.path.join(SPSDK_DATA_FOLDER, "jsonschemas", f"sch_{feature}.yaml") + return self.load_db_cfg_file(filename) + + def load_db_cfg_file(self, filename: str) -> Dict[str, Any]: + """Return load database config file (JSON/YAML). Use SingleTon behavior. + + :param filename: Path to config file. + :raises SPSDKError: Invalid config file. + :return: Loaded file in dictionary. 
+ """ + abs_path = os.path.abspath(filename) + if abs_path not in self._cfg_cache: + try: + cfg = load_configuration(abs_path) + except SPSDKError as exc: + raise SPSDKError(f"Invalid configuration file. {str(exc)}") from exc + self._cfg_cache[abs_path] = cfg + + return deepcopy(self._cfg_cache[abs_path]) + + def get_devices_with_feature( + self, feature: str, sub_keys: Optional[List[str]] = None + ) -> List[str]: + """Get the list of all device names that supports requested feature. + + :param feature: Name of feature + :param sub_keys: Optional sub keys to specify the nested dictionaries that feature needs to has to be counted + :returns: List of devices that supports requested feature. + """ + + def check_sub_keys(d: dict, sub_keys: List[str]) -> bool: + key = sub_keys.pop(0) + if not key in d: + return False + + if len(sub_keys) == 0: + return True + + nested = d[key] + if not isinstance(nested, dict): + return False + return check_sub_keys(nested, sub_keys) + + devices = [] + for device in self.devices: + if feature in device.features_list: + if sub_keys and not check_sub_keys( + device.revisions.get_latest().features[feature], copy(sub_keys) + ): + continue + devices.append(device.name) + + devices.sort() + return devices + + def __hash__(self) -> int: + """Hash function of the database.""" + return hash(len(self._cfg_cache)) + + +class DatabaseManager: + """Main SPSDK database manager.""" + + _instance = None + _db: Optional[Database] = None + _db_hash: int = 0 + _db_cache_file_name = "" + + @staticmethod + def get_cache_filename() -> Tuple[str, str]: + """Get database cache folder and file name. + + :return: Tuple of cache path and database file name. + """ + data_folder = SPSDK_DATA_FOLDER.lower() + cache_name = ( + "db_" + + get_hash(data_folder.encode(), algorithm=EnumHashAlgorithm.SHA1)[:6].hex() + + ".cache" + ) + cache_path = platformdirs.user_cache_dir(appname="spsdk", version="2.1.0") + return (cache_path, os.path.join(cache_path, cache_name)) + + @staticmethod + def clear_cache() -> None: + """Clear SPSDK cache.""" + path, _ = DatabaseManager.get_cache_filename() + shutil.rmtree(path) + + @classmethod + def _get_database(cls) -> Database: + """Get database and count with cache.""" + if SPSDK_CACHE_DISABLED: + DatabaseManager.clear_cache() + return Database(SPSDK_DATA_FOLDER) + + db_hash = DatabaseManager.get_db_hash(SPSDK_DATA_FOLDER) + + if os.path.exists(cls._db_cache_file_name): + try: + with open(cls._db_cache_file_name, mode="rb") as f: + loaded_db = pickle.load(f) + assert isinstance(loaded_db, Database) + if db_hash == loaded_db.db_hash: + logger.debug( + f"Loaded database from cache: {cls._db_cache_file_name}" + ) + return loaded_db + # if the hash is not same clear cache and make a new one + logger.debug( + f"Existing cached DB ({cls._db_cache_file_name}) has invalid hash" + ) + DatabaseManager.clear_cache() + except Exception as exc: + logger.debug(f"Cannot load database cache: {str(exc)}") + + db = Database(SPSDK_DATA_FOLDER) + db.db_hash = db_hash + try: + os.makedirs(cls._db_cache_folder_name, exist_ok=True) + with open(cls._db_cache_file_name, mode="wb") as f: + pickle.dump(db, f, pickle.HIGHEST_PROTOCOL) + logger.debug(f"Created database cache: {cls._db_cache_file_name}") + except Exception as exc: + logger.debug(f"Cannot store database cache: {str(exc)}") + return db + + def __new__(cls) -> Self: + """Manage SPSDK Database as a singleton class. 
+ + :return: SPSDK_Database object + """ + if cls._instance: + return cls._instance + cls._instance = super(DatabaseManager, cls).__new__(cls) + ( + cls._db_cache_folder_name, + cls._db_cache_file_name, + ) = DatabaseManager.get_cache_filename() + cls._db = cls._instance._get_database() + cls._db_hash = hash(cls._db) + return cls._instance + + @staticmethod + def get_db_hash(path: str) -> bytes: + """Get the real db hash.""" + hash_obj = Hash(EnumHashAlgorithm.SHA1) + for root, dirs, files in os.walk(path): + for _dir in dirs: + hash_obj.update(DatabaseManager.get_db_hash(os.path.join(root, _dir))) + for file in files: + if os.path.splitext(file)[1] in [".json", ".yaml"]: + stat = os.stat(os.path.join(root, file)) + hash_obj.update_int(stat.st_mtime_ns) + hash_obj.update_int(stat.st_ctime_ns) + hash_obj.update_int(stat.st_size) + + return hash_obj.finalize() + + @property + def db(self) -> Database: + """Get Database.""" + db = type(self)._db + assert isinstance(db, Database) + return db + + # """List all SPSDK supported features""" + COMM_BUFFER = "comm_buffer" + # BLHOST = "blhost" + CERT_BLOCK = "cert_block" + DAT = "dat" + MBI = "mbi" + HAB = "hab" + AHAB = "ahab" + SIGNED_MSG = "signed_msg" + PFR = "pfr" + IFR = "ifr" + BOOTABLE_IMAGE = "bootable_image" + FCB = "fcb" + XMCD = "xmcd" + BEE = "bee" + IEE = "iee" + OTFAD = "otfad" + SB21 = "sb21" + SB31 = "sb31" + SBX = "sbx" + SHADOW_REGS = "shadow_regs" + DEVHSM = "devhsm" + TP = "tp" + TZ = "tz" + ELE = "ele" + MEMCFG = "memcfg" + WPC = "wpc" + + +@atexit.register +def on_delete() -> None: + """Delete method of SPSDK database. + + The exit method is used to update cache in case it has been changed. + """ + if SPSDK_CACHE_DISABLED: + return + if DatabaseManager._db_hash != hash(DatabaseManager._db): + try: + with open(DatabaseManager._db_cache_file_name, mode="wb") as f: + logger.debug(f"Updating cache: {DatabaseManager._db_cache_file_name}") + pickle.dump(DatabaseManager().db, f, pickle.HIGHEST_PROTOCOL) + except FileNotFoundError: + pass + + +def get_db( + device: str, + revision: str = "latest", +) -> Features: + """Get device feature database. + + :param device: The device name. + :param revision: The revision of the silicon. + :return: The feature data. + """ + return DatabaseManager().db.get_device_features(device, revision) + + +def get_device(device: str) -> Device: + """Get device database object. + + :param device: The device name. + :return: The device data. + """ + return DatabaseManager().db.devices.get(device) + + +def get_families(feature: str, sub_keys: Optional[List[str]] = None) -> List[str]: + """Get the list of all family names that supports requested feature. + + :param feature: Name of feature + :param sub_keys: Optional sub keys to specify the nested dictionaries that feature needs to has to be counted + :returns: List of devices that supports requested feature. + """ + return DatabaseManager().db.get_devices_with_feature(feature, sub_keys) + + +def get_schema_file(feature: str) -> Dict[str, Any]: + """Get JSON Schema file name for the requested feature. + + :param feature: Requested feature. + :return: Loaded dictionary of JSON Schema file. 
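# ---------------------------------------------------------------------------
# Editorial sketch (not part of the vendored code): the module-level helpers
# above are the usual entry points; they all route through the DatabaseManager
# singleton and its pickle cache. The family name is a placeholder.
families = get_families(DatabaseManager.CERT_BLOCK)   # devices supporting "cert_block"
features = get_db("lpc55s6x")                         # latest revision by default
schemas = get_schema_file(DatabaseManager.CERT_BLOCK)
# ---------------------------------------------------------------------------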
+ """ + return DatabaseManager().db.get_schema_file(feature) diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/exceptions.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/exceptions.py new file mode 100644 index 00000000..95ed2013 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/exceptions.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2021-2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module provides exceptions for SPSDK utilities.""" +from ..exceptions import SPSDKError + + +class SPSDKRegsError(SPSDKError): + """General Error group for utilities SPSDK registers module.""" + + +class SPSDKRegsErrorRegisterGroupMishmash(SPSDKRegsError): + """Register Group inconsistency problem.""" + + +class SPSDKRegsErrorRegisterNotFound(SPSDKRegsError): + """Register has not been found.""" + + +class SPSDKRegsErrorBitfieldNotFound(SPSDKRegsError): + """Bitfield has not been found.""" + + +class SPSDKRegsErrorEnumNotFound(SPSDKRegsError): + """Enum has not been found.""" + + +class SPSDKTimeoutError(TimeoutError, SPSDKError): + """SPSDK Timeout.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/__init__.py new file mode 100644 index 00000000..7a3c4d88 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Device Interfaces.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/commands.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/commands.py new file mode 100644 index 00000000..232ec055 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/commands.py @@ -0,0 +1,34 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Generic commands implementation.""" +from abc import ABC, abstractmethod + + +class CmdResponseBase(ABC): + """Response base format class.""" + + @abstractmethod + def __str__(self) -> str: + """Get object info.""" + + @property + @abstractmethod + def value(self) -> int: + """Return a integer representation of the response.""" + + +class CmdPacketBase(ABC): + """COmmand protocol base.""" + + @abstractmethod + def to_bytes(self, padding: bool = True) -> bytes: + """Serialize CmdPacket into bytes. 
+ + :param padding: If True, add padding to specific size + :return: Serialized object into bytes + """ diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/__init__.py new file mode 100644 index 00000000..28f99a1b --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module implementing the low level device.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/base.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/base.py new file mode 100644 index 00000000..8724cfa6 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/base.py @@ -0,0 +1,75 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Low level device base class.""" +import logging +from abc import ABC, abstractmethod +from types import TracebackType +from typing import Optional, Type + +from typing_extensions import Self + +logger = logging.getLogger(__name__) + + +class DeviceBase(ABC): + """Device base class.""" + + def __enter__(self) -> Self: + self.open() + return self + + def __exit__( + self, + exception_type: Optional[Type[Exception]] = None, + exception_value: Optional[Exception] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + @property + @abstractmethod + def is_opened(self) -> bool: + """Indicates whether interface is open.""" + + @abstractmethod + def open(self) -> None: + """Open the interface.""" + + @abstractmethod + def close(self) -> None: + """Close the interface.""" + + @abstractmethod + def read(self, length: int, timeout: Optional[int] = None) -> bytes: + """Read data from the device. + + :param length: Length of data to be read + :param timeout: Read timeout to be applied + """ + + @abstractmethod + def write(self, data: bytes, timeout: Optional[int] = None) -> None: + """Write data to the device. 
+ + :param data: Data to be written + :param timeout: Read timeout to be applied + """ + + @property + @abstractmethod + def timeout(self) -> int: + """Timeout property.""" + + @timeout.setter + @abstractmethod + def timeout(self, value: int) -> None: + """Timeout property setter.""" + + @abstractmethod + def __str__(self) -> str: + """Return string containing information about the interface.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/usb_device.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/usb_device.py new file mode 100644 index 00000000..0f861227 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/device/usb_device.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Low level Hid device.""" +import logging +from typing import Dict, List, Optional + +import hid +from typing_extensions import Self + +from ....exceptions import SPSDKConnectionError, SPSDKError +from ....utils.exceptions import SPSDKTimeoutError +from ....utils.interfaces.device.base import DeviceBase +from ....utils.misc import get_hash +from ....utils.usbfilter import NXPUSBDeviceFilter, USBDeviceFilter + +logger = logging.getLogger(__name__) + + +class UsbDevice(DeviceBase): + """USB device class.""" + + def __init__( + self, + vid: Optional[int] = None, + pid: Optional[int] = None, + path: Optional[bytes] = None, + serial_number: Optional[str] = None, + vendor_name: Optional[str] = None, + product_name: Optional[str] = None, + interface_number: Optional[int] = None, + timeout: Optional[int] = None, + ) -> None: + """Initialize the USB interface object.""" + self.vid = vid or 0 + self.pid = pid or 0 + self.path = path or b"" + self.serial_number = serial_number or "" + self.vendor_name = vendor_name or "" + self.product_name = product_name or "" + self.interface_number = interface_number or 0 + self._timeout = timeout or 2000 + self._device: Optional[hid.device] = None + + @property + def timeout(self) -> int: + """Timeout property.""" + return self._timeout + + @timeout.setter + def timeout(self, value: int) -> None: + """Timeout property setter.""" + self._timeout = value + + @property + def is_opened(self) -> bool: + """Indicates whether device is open. + + :return: True if device is open, False othervise. + """ + return self._device is not None + + def open(self) -> None: + """Open the interface. + + :raises SPSDKError: if device is already opened + :raises SPSDKConnectionError: if the device can not be opened + """ + logger.debug(f"Opening the Interface: {str(self)}") + if self._device: + # This would get HID_DEVICE into broken state + raise SPSDKError("Can't open already opened device") + try: + self._device = hid.Device(path=self.path) + except Exception as error: + raise SPSDKConnectionError( + f"Unable to open device '{str(self)}'" + ) from error + + def close(self) -> None: + """Close the interface. + + :raises SPSDKConnectionError: if no device is available + :raises SPSDKConnectionError: if the device can not be opened + """ + logger.debug(f"Closing the Interface: {str(self)}") + if self._device: + try: + self._device.close() + except Exception as error: + raise SPSDKConnectionError( + f"Unable to close device '{str(self)}'" + ) from error + + def read(self, length: int, timeout: Optional[int] = None) -> bytes: + """Read data on the IN endpoint associated to the HID interface. + + :return: Return CmdResponse object. 
+ :raises SPSDKConnectionError: Raises an error if device is not opened for reading + :raises SPSDKConnectionError: Raises if device is not available + :raises SPSDKConnectionError: Raises if reading fails + :raises SPSDKTimeoutError: Time-out + """ + timeout = timeout or self.timeout + if not self._device: + raise SPSDKConnectionError("Device is not opened for reading") + try: + data = self._device.read(length, timeout=timeout) + except Exception as e: + raise SPSDKConnectionError(str(e)) from e + if not data: + logger.error(f"Cannot read from HID device") + raise SPSDKTimeoutError() + return bytes(data) + + def write(self, data: bytes, timeout: Optional[int] = None) -> None: + """Send data to device. + + :param data: Data to send + :param timeout: Timeout to be used + :raises SPSDKConnectionError: Sending data to device failure + """ + timeout = timeout or self.timeout + if not self._device: + raise SPSDKConnectionError("Device is not opened for writing") + try: + bytes_written = self._device.write(data) + except Exception as e: + raise SPSDKConnectionError(str(e)) from e + if bytes_written < 0 or bytes_written < len(data): + raise SPSDKConnectionError( + f"Invalid size of written bytes has been detected: {bytes_written} != {len(data)}" + ) + + def __str__(self) -> str: + """Return information about the USB interface.""" + return ( + f"{self.product_name:s} (0x{self.vid:04X}, 0x{self.pid:04X})" + f"path={self.path!r} sn='{self.serial_number}'" + ) + + @property + def path_str(self) -> str: + """BLHost-friendly string representation of USB path.""" + return NXPUSBDeviceFilter.convert_usb_path(self.path) + + @property + def path_hash(self) -> str: + """BLHost-friendly hash of the USB path.""" + return get_hash(self.path) + + def __hash__(self) -> int: + return hash(self.path) + + @classmethod + def scan( + cls, + device_id: Optional[str] = None, + usb_devices_filter: Optional[Dict] = None, + timeout: Optional[int] = None, + ) -> List[Self]: + """Scan connected USB devices. + + :param device_id: Device identifier , , device/instance path, device name are supported + :param usb_devices_filter: Dictionary holding NXP device vid/pid {"device_name": [vid(int), pid(int)]}. + If set, only devices included in the dictionary will be scanned + :param timeout: Read/write timeout + :return: list of matching RawHid devices + """ + usb_filter = NXPUSBDeviceFilter( + usb_id=device_id, nxp_device_names=usb_devices_filter + ) + devices = cls.enumerate(usb_filter, timeout=timeout) + return devices + + @classmethod + def enumerate( + cls, usb_device_filter: USBDeviceFilter, timeout: Optional[int] = None + ) -> List[Self]: + """Get list of all connected devices which matches device_id. 
+ + :param usb_device_filter: USBDeviceFilter object + :param timeout: Default timeout to be set + :return: List of interfaces found + """ + devices = [] + + # iterate on all devices found + for dev in hid.enumerate(): + if usb_device_filter.compare(dev) is True: + new_device = cls( + vid=dev["vendor_id"], + pid=dev["product_id"], + path=dev["path"], + vendor_name=dev["manufacturer_string"], + product_name=dev["product_string"], + interface_number=dev["interface_number"], + timeout=timeout, + ) + devices.append(new_device) + return devices diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/protocol/__init__.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/protocol/__init__.py new file mode 100644 index 00000000..e06c1a13 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/protocol/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Protocol base.""" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/protocol/protocol_base.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/protocol/protocol_base.py new file mode 100644 index 00000000..89102e6f --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/interfaces/protocol/protocol_base.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright 2023-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Protocol base.""" +from abc import ABC, abstractmethod +from types import ModuleType, TracebackType +from typing import Dict, List, Optional, Type, Union + +from typing_extensions import Self + +from ....exceptions import SPSDKError +from ....utils.interfaces.commands import CmdPacketBase, CmdResponseBase +from ....utils.interfaces.device.base import DeviceBase +from ....utils.plugins import PluginsManager, PluginType + + +class ProtocolBase(ABC): + """Protocol base class.""" + + device: DeviceBase + identifier: str + + def __init__(self, device: DeviceBase) -> None: + """Initialize the MbootSerialProtocol object. + + :param device: The device instance + """ + self.device = device + + def __str__(self) -> str: + return f"identifier='{self.identifier}', device={self.device}" + + def __enter__(self) -> Self: + self.open() + return self + + def __exit__( + self, + exception_type: Optional[Type[Exception]] = None, + exception_value: Optional[Exception] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + self.close() + + @abstractmethod + def open(self) -> None: + """Open the interface.""" + + @abstractmethod + def close(self) -> None: + """Close the interface.""" + + @property + @abstractmethod + def is_opened(self) -> bool: + """Indicates whether interface is open.""" + + @classmethod + @abstractmethod + def scan_from_args( + cls, + params: str, + timeout: int, + extra_params: Optional[str] = None, + ) -> List[Self]: + """Scan method.""" + + @abstractmethod + def write_command(self, packet: CmdPacketBase) -> None: + """Write command to the device. + + :param packet: Command packet to be sent + """ + + @abstractmethod + def write_data(self, data: bytes) -> None: + """Write data to the device. + + :param data: Data to be send + """ + + @abstractmethod + def read(self, length: Optional[int] = None) -> Union[CmdResponseBase, bytes]: + """Read data from device. 
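The `UsbDevice` class above wraps `hid` enumeration, reads and writes. A minimal usage sketch, assuming the vendored import path from this diff; the VID/PID pair and device name are placeholders, not real Nitrokey identifiers:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.utils.interfaces.device.usb_device import (
    UsbDevice,
)

# Placeholder VID/PID and name; replace with the identifiers of your device.
devices = UsbDevice.scan(
    usb_devices_filter={"my-device": [0x1234, 0x5678]},  # {"name": [vid, pid]}
    timeout=1000,
)
if devices:
    with devices[0] as dev:  # DeviceBase.__enter__ opens, __exit__ closes
        dev.write(b"\x00" * 64)   # raw HID report, device specific
        report = dev.read(64)     # raises SPSDKTimeoutError if nothing arrives
        print(f"{dev}: received {len(report)} bytes")
```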
+ + :return: read data + """ + + @classmethod + def _get_interfaces(cls) -> List[Type[Self]]: + """Get list of all available interfaces.""" + cls._load_plugins() + return [ + sub_class + for sub_class in cls._get_subclasses(cls) + if getattr(sub_class, "identifier", None) + ] + + @classmethod + def get_interface(cls, identifier: str) -> Type[Self]: + """Get list of all available interfaces.""" + interface = next( + ( + iface + for iface in cls._get_interfaces() + if iface.identifier == identifier + ), + None, + ) + if not interface: + raise SPSDKError(f"Interface with identifier {identifier} does not exist.") + return interface + + @staticmethod + def _load_plugins() -> Dict[str, ModuleType]: + """Load all installed interface plugins.""" + plugins_manager = PluginsManager() + plugins_manager.load_from_entrypoints(PluginType.DEVICE_INTERFACE.label) + return plugins_manager.plugins + + @classmethod + def _get_subclasses( + cls, + base_class: Type, + ) -> List[Type[Self]]: + """Recursively find all subclasses.""" + subclasses = [] + for subclass in base_class.__subclasses__(): + subclasses.append(subclass) + subclasses.extend(cls._get_subclasses(subclass)) + return subclasses diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/misc.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/misc.py new file mode 100644 index 00000000..400efc94 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/misc.py @@ -0,0 +1,931 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# Copyright 2020-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Miscellaneous functions used throughout the SPSDK.""" +import contextlib +import hashlib +import json +import logging +import math +import os +import re +import textwrap +import time +from enum import Enum +from math import ceil +from struct import pack, unpack +from typing import ( + Any, + Callable, + Dict, + Generator, + Iterable, + Iterator, + List, + Optional, + Type, + TypeVar, + Union, +) + +from ..crypto.rng import random_bytes +from ..exceptions import SPSDKError, SPSDKValueError +from ..utils.exceptions import SPSDKTimeoutError + +# for generics +T = TypeVar("T") # pylint: disable=invalid-name + +logger = logging.getLogger(__name__) + + +class Endianness(str, Enum): + """Endianness enum.""" + + BIG = "big" + LITTLE = "little" + + @classmethod + def values(cls) -> List[str]: + """Get enumeration values.""" + return [mem.value for mem in Endianness.__members__.values()] + + +class BinaryPattern: + """Binary pattern class. + + Supported patterns: + - rand: Random Pattern + - zeros: Filled with zeros + - ones: Filled with all ones + - inc: Filled with repeated numbers incremented by one 0-0xff + - any kind of number, that will be repeated to fill up whole image. + The format could be decimal, hexadecimal, bytes. + """ + + SPECIAL_PATTERNS = ["rand", "zeros", "ones", "inc"] + + def __init__(self, pattern: str) -> None: + """Constructor of pattern class. + + :param pattern: Supported patterns: + - rand: Random Pattern + - zeros: Filled with zeros + - ones: Filled with all ones + - inc: Filled with repeated numbers incremented by one 0-0xff + - any kind of number, that will be repeated to fill up whole image. + The format could be decimal, hexadecimal, bytes. + :raises SPSDKValueError: Unsupported pattern detected. 
+ """ + try: + value_to_int(pattern) + except SPSDKError: + if not pattern in BinaryPattern.SPECIAL_PATTERNS: + raise SPSDKValueError( # pylint: disable=raise-missing-from + f"Unsupported input pattern {pattern}" + ) + + self._pattern = pattern + + def get_block(self, size: int) -> bytes: + """Get block filled with pattern. + + :param size: Size of block to return. + :return: Filled up block with specified pattern. + """ + if self._pattern == "zeros": + return bytes(size) + + if self._pattern == "ones": + return bytes(b"\xff" * size) + + if self._pattern == "rand": + return random_bytes(size) + + if self._pattern == "inc": + return bytes((x & 0xFF for x in range(size))) + + pattern = value_to_bytes(self._pattern, align_to_2n=False) + block = bytes(pattern * (int((size / len(pattern))) + 1)) + return block[:size] + + @property + def pattern(self) -> str: + """Get the pattern. + + :return: Pattern in string representation. + """ + try: + return hex(value_to_int(self._pattern)) + except SPSDKError: + return self._pattern + + +def align(number: int, alignment: int = 4) -> int: + """Align number (size or address) size to specified alignment, typically 4, 8 or 16 bytes boundary. + + :param number: input to be aligned + :param alignment: the boundary to align; typical value is power of 2 + :return: aligned number; result is always >= size (e.g. aligned up) + :raises SPSDKError: When there is wrong alignment + """ + if alignment <= 0 or number < 0: + raise SPSDKError("Wrong alignment") + + return (number + (alignment - 1)) // alignment * alignment + + +def align_block( + data: Union[bytes, bytearray], + alignment: int = 4, + padding: Optional[Union[int, str, BinaryPattern]] = None, +) -> bytes: + """Align binary data block length to specified boundary by adding padding bytes to the end. + + :param data: to be aligned + :param alignment: boundary alignment (typically 2, 4, 16, 64 or 256 boundary) + :param padding: byte to be added or BinaryPattern + :return: aligned block + :raises SPSDKError: When there is wrong alignment + """ + assert isinstance(data, (bytes, bytearray)) + + if alignment < 0: + raise SPSDKError("Wrong alignment") + current_size = len(data) + num_padding = align(current_size, alignment) - current_size + if not num_padding: + return bytes(data) + if not padding: + padding = BinaryPattern("zeros") + elif not isinstance(padding, BinaryPattern): + padding = BinaryPattern(str(padding)) + return bytes(data + padding.get_block(num_padding)) + + +def align_block_fill_random(data: bytes, alignment: int = 4) -> bytes: + """Same as `align_block`, just parameter `padding` is fixed to `-1` to fill with random data.""" + return align_block(data, alignment, BinaryPattern("rand")) + + +def extend_block(data: bytes, length: int, padding: int = 0) -> bytes: + """Add padding to the binary data block to extend the length to specified value. + + :param data: block to be extended + :param length: requested block length; the value must be >= current block length + :param padding: 8-bit value value to be used as a padding + :return: block extended with padding + :raises SPSDKError: When the length is incorrect + """ + current_len = len(data) + if length < current_len: + raise SPSDKError("Incorrect length") + num_padding = length - current_len + if not num_padding: + return data + return data + bytes([padding]) * num_padding + + +def find_first(iterable: Iterable[T], predicate: Callable[[T], bool]) -> Optional[T]: + """Find first element from the list, that matches the condition. 
+ + :param iterable: list of elements + :param predicate: function for selection of the element + :return: found element; None if not found + """ + return next((a for a in iterable if predicate(a)), None) + + +def load_binary(path: str, search_paths: Optional[List[str]] = None) -> bytes: + """Loads binary file into bytes. + + :param path: Path to the file. + :param search_paths: List of paths where to search for the file, defaults to None + :return: content of the binary file as bytes + """ + data = load_file(path, mode="rb", search_paths=search_paths) + assert isinstance(data, bytes) + return data + + +def load_text(path: str, search_paths: Optional[List[str]] = None) -> str: + """Loads text file into string. + + :param path: Path to the file. + :param search_paths: List of paths where to search for the file, defaults to None + :return: content of the text file as string + """ + text = load_file(path, mode="r", search_paths=search_paths) + assert isinstance(text, str) + return text + + +def load_file( + path: str, mode: str = "r", search_paths: Optional[List[str]] = None +) -> Union[str, bytes]: + """Loads a file into bytes. + + :param path: Path to the file. + :param mode: mode for reading the file 'r'/'rb' + :param search_paths: List of paths where to search for the file, defaults to None + :return: content of the binary file as bytes or str (based on mode) + """ + path = find_file(path, search_paths=search_paths) + logger.debug(f"Loading {'binary' if 'b' in mode else 'text'} file from {path}") + with open(path, mode) as f: + return f.read() + + +def write_file( + data: Union[str, bytes], path: str, mode: str = "w", encoding: Optional[str] = None +) -> int: + """Writes data into a file. + + :param data: data to write + :param path: Path to the file. + :param mode: writing mode, 'w' for text, 'wb' for binary data, defaults to 'w' + :param encoding: Encoding of written file ('ascii', 'utf-8'). + :return: number of written elements + """ + path = path.replace("\\", "/") + folder = os.path.dirname(path) + if folder and not os.path.exists(folder): + os.makedirs(folder, exist_ok=True) + + logger.debug(f"Storing {'binary' if 'b' in mode else 'text'} file at {path}") + with open(path, mode, encoding=encoding) as f: + return f.write(data) + + +def get_abs_path(file_path: str, base_dir: Optional[str] = None) -> str: + """Return a full path to the file. + + param base_dir: Base directory to create absolute path, if not specified the system CWD is used. + return: Absolute file path. + """ + if os.path.isabs(file_path): + return file_path.replace("\\", "/") + + return os.path.abspath(os.path.join(base_dir or os.getcwd(), file_path)).replace( + "\\", "/" + ) + + +def _find_path( + path: str, + check_func: Callable[[str], bool], + use_cwd: bool = True, + search_paths: Optional[List[str]] = None, + raise_exc: bool = True, +) -> str: + """Return a full path to the file. 
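The file helpers above resolve relative names against optional search paths before reading or writing. A small sketch, assuming the vendored import path from this diff and a temporary directory as the search path:

```python
import tempfile

from pynitrokey.trussed.bootloader.lpc55_upload.utils.misc import load_binary, write_file

with tempfile.TemporaryDirectory() as tmp:
    # write_file creates the missing "fw" folder before writing.
    write_file(b"\x01\x02\x03", f"{tmp}/fw/image.bin", mode="wb")
    # The bare file name is resolved against the given search paths.
    assert load_binary("image.bin", search_paths=[f"{tmp}/fw"]) == b"\x01\x02\x03"
```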
+ + `search_paths` takes precedence over `CWD` if used (default) + + :param path: File name, part of file path or full path + :param use_cwd: Try current working directory to find the file, defaults to True + :param search_paths: List of paths where to search for the file, defaults to None + :param raise_exc: Raise exception if file is not found, defaults to True + :return: Full path to the file + :raises SPSDKError: File not found + """ + path = path.replace("\\", "/") + + if os.path.isabs(path): + if not check_func(path): + raise SPSDKError(f"Path '{path}' not found") + return path + if search_paths: + for dir_candidate in search_paths: + if not dir_candidate: + continue + dir_candidate = dir_candidate.replace("\\", "/") + path_candidate = get_abs_path(path, base_dir=dir_candidate) + if check_func(path_candidate): + return path_candidate + if use_cwd and check_func(path): + return get_abs_path(path) + # list all directories in error message + searched_in: List[str] = [] + if use_cwd: + searched_in.append(os.path.abspath(os.curdir)) + if search_paths: + searched_in.extend(filter(None, search_paths)) + searched_in = [s.replace("\\", "/") for s in searched_in] + err_str = f"Path '{path}' not found, Searched in: {', '.join(searched_in)}" + if not raise_exc: + logger.debug(err_str) + return "" + raise SPSDKError(err_str) + + +def find_dir( + dir_path: str, + use_cwd: bool = True, + search_paths: Optional[List[str]] = None, + raise_exc: bool = True, +) -> str: + """Return a full path to the directory. + + `search_paths` takes precedence over `CWD` if used (default) + + :param dir_path: Directory name, part of directory path or full path + :param use_cwd: Try current working directory to find the directory, defaults to True + :param search_paths: List of paths where to search for the directory, defaults to None + :param raise_exc: Raise exception if directory is not found, defaults to True + :return: Full path to the directory + :raises SPSDKError: File not found + """ + return _find_path( + path=dir_path, + check_func=os.path.isdir, + use_cwd=use_cwd, + search_paths=search_paths, + raise_exc=raise_exc, + ) + + +def find_file( + file_path: str, + use_cwd: bool = True, + search_paths: Optional[List[str]] = None, + raise_exc: bool = True, +) -> str: + """Return a full path to the file. + + `search_paths` takes precedence over `CWD` if used (default) + + :param file_path: File name, part of file path or full path + :param use_cwd: Try current working directory to find the file, defaults to True + :param search_paths: List of paths where to search for the file, defaults to None + :param raise_exc: Raise exception if file is not found, defaults to True + :return: Full path to the file + :raises SPSDKError: File not found + """ + return _find_path( + path=file_path, + check_func=os.path.isfile, + use_cwd=use_cwd, + search_paths=search_paths, + raise_exc=raise_exc, + ) + + +@contextlib.contextmanager +def use_working_directory(path: str) -> Iterator[None]: + # pylint: disable=missing-yield-doc + """Execute the block in given directory. + + Cd into specific directory. + Execute the block. + Change the directory back into the original one. 
+ + :param path: the path, where the current directory will be changed to + """ + current_dir = os.getcwd() + try: + os.chdir(path) + yield + finally: + os.chdir(current_dir) + assert os.getcwd() == current_dir + + +def format_value( + value: int, size: int, delimiter: str = "_", use_prefix: bool = True +) -> str: + """Convert the 'value' into either BIN or HEX string, depending on 'size'. + + if 'size' is divisible by 8, function returns HEX, BIN otherwise + digits in result string are grouped by 4 using 'delimiter' (underscore) + """ + padding = size if size % 8 else (size // 8) * 2 + infix = "b" if size % 8 else "x" + sign = "-" if value < 0 else "" + parts = re.findall(".{1,4}", f"{abs(value):0{padding}{infix}}"[::-1]) + rev = delimiter.join(parts)[::-1] + prefix = f"0{infix}" if use_prefix else "" + return f"{sign}{prefix}{rev}" + + +def get_bytes_cnt_of_int( + value: int, align_to_2n: bool = True, byte_cnt: Optional[int] = None +) -> int: + """Returns count of bytes needed to store handled integer. + + :param value: Input integer value. + :param align_to_2n: The result will be aligned to standard sizes 1,2,4,8,12,16,20. + :param byte_cnt: The result count of bytes. + :raises SPSDKValueError: The integer input value doesn't fit into byte_cnt. + :return: Number of bytes needed to store integer. + """ + cnt = 0 + if value == 0: + return byte_cnt or 1 + + while value != 0: + value >>= 8 + cnt += 1 + + if align_to_2n and cnt > 2: + cnt = int(ceil(cnt / 4)) * 4 + + if byte_cnt and cnt > byte_cnt: + raise SPSDKValueError( + f"Value takes more bytes than required byte count {byte_cnt} after align." + ) + + cnt = byte_cnt or cnt + + return cnt + + +def value_to_int( + value: Union[bytes, bytearray, int, str], default: Optional[int] = None +) -> int: + """Convert a value given in any of the supported formats to an integer. + + :param value: Input value. + :param default: Default value in case of invalid input. + :return: Value in Integer. + :raises SPSDKError: Unsupported input type. + """ + if isinstance(value, int): + return value + + if isinstance(value, (bytes, bytearray)): + return int.from_bytes(value, Endianness.BIG.value) + + if isinstance(value, str) and value != "": + match = re.match( + r"(?P<prefix>0[box])?(?P<number>[0-9a-f_]+)(?P<suffix>[ul]{0,3})$", + value.strip().lower(), + ) + if match: + base = {"0b": 2, "0o": 8, "0": 10, "0x": 16, None: 10}[ + match.group("prefix") + ] + try: + return int(match.group("number"), base=base) + except ValueError: + pass + + if default is not None: + return default + raise SPSDKError(f"Invalid input number type({type(value)}) with value ({value})") + + +def value_to_bytes( + value: Union[bytes, bytearray, int, str], + align_to_2n: bool = True, + byte_cnt: Optional[int] = None, + endianness: Endianness = Endianness.BIG, +) -> bytes: + """Convert a value given in any of the supported formats to bytes. + + :param value: Input value. + :param align_to_2n: When set, the length of the returned array is aligned to 1,2,4,8,12 etc. + :param byte_cnt: The result count of bytes. + :param endianness: The result bytes endianness ['big', 'little']. + :return: Value in bytes. + """ + if isinstance(value, bytes): + return value + + if isinstance(value, bytearray): + return bytes(value) + + value = value_to_int(value) + return value.to_bytes( + get_bytes_cnt_of_int(value, align_to_2n, byte_cnt=byte_cnt), endianness.value + ) + + +def value_to_bool(value: Union[bool, int, str]) -> bool: + """Decode a bool value from various formats. + + :param value: Input value. + :return: Boolean value.
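A few worked conversions for the number helpers above, assuming the vendored import path from this diff; the expected values follow from the implementations shown:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.utils.misc import (
    Endianness,
    format_value,
    get_bytes_cnt_of_int,
    value_to_bytes,
    value_to_int,
)

assert value_to_int("0x1000") == 4096
assert value_to_int(b"\x01\x00") == 256               # bytes are interpreted big-endian
assert get_bytes_cnt_of_int(0x12345) == 4             # 3 bytes, aligned up to 4
assert value_to_bytes("0x1234", endianness=Endianness.LITTLE) == b"\x34\x12"
assert format_value(0xABCD1234, 32) == "0xabcd_1234"  # grouped by 4 hex digits
```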
+ :raises SPSDKError: Unsupported input type. + """ + if isinstance(value, bool): + return value + + if isinstance(value, int): + return bool(value) + + if isinstance(value, str): + return value in ("True", "T", "1") + + raise SPSDKError(f"Invalid input Boolean type({type(value)}) with value ({value})") + + +def load_hex_string( + source: Optional[Union[str, int, bytes]], + expected_size: int, + search_paths: Optional[List[str]] = None, +) -> bytes: + """Get the HEX string from the command line parameter (Keys, digests, etc). + + :param source: File path to key file or hexadecimal value. If not specified random value is used. + :param expected_size: Expected size of key in bytes. + :param search_paths: List of paths where to search for the file, defaults to None + :raises SPSDKError: Invalid key + :return: Key in bytes. + """ + if not source: + logger.warning( + f"The key source is not specified, the random value is used in size of {expected_size} B." + ) + return random_bytes(expected_size) + + key = None + assert expected_size > 0, "Invalid expected size of key" + if isinstance(source, (bytes, int)): + return value_to_bytes(source, byte_cnt=expected_size) + + try: + file_path = find_file(source, search_paths=search_paths) + try: + str_key = load_file(file_path) + assert isinstance(str_key, str) + if not str_key.startswith(("0x", "0X")): + str_key = "0x" + str_key + key = value_to_bytes(str_key, byte_cnt=expected_size) + if len(key) != expected_size: + raise SPSDKError("Invalid Key size.") + except (SPSDKError, UnicodeDecodeError): + key = load_binary(file_path) + except Exception: + try: + if not source.startswith(("0x", "0X")): + source = "0x" + source + key = value_to_bytes(source, byte_cnt=expected_size) + except SPSDKError: + pass + + if key is None or len(key) != expected_size: + raise SPSDKError(f"Invalid key input: {source}") + + return key + + +def reverse_bytes_in_longs(arr: bytes) -> bytes: + """The function reverse byte order in longs from input bytes. + + :param arr: Input array. + :return: New array with reversed bytes. + :raises SPSDKError: Raises when invalid value is in input. + """ + arr_len = len(arr) + if arr_len % 4 != 0: + raise SPSDKError("The input array is not in modulo 4!") + + result = bytearray() + + for x in range(0, arr_len, 4): + word = bytearray(arr[x : x + 4]) + word.reverse() + result.extend(word) + return bytes(result) + + +def reverse_bits_in_bytes(arr: bytes) -> bytes: + """The function reverse bits order in input bytes. + + :param arr: Input array. + :return: New array with reversed bits in bytes. + :raises SPSDKError: Raises when invalid value is in input. + """ + result = bytearray() + + for x in arr: + result.append(int(f"{x:08b}"[::-1], 2)) + + return bytes(result) + + +def change_endianness(bin_data: bytes) -> bytes: + """Convert binary format used in files to binary used in register object. + + :param bin_data: input binary array. + :return: Converted array (practically little to big endianness). + :raises SPSDKError: Invalid value on input. 
+ """ + data = bytearray(bin_data) + length = len(data) + if length == 1: + return data + + if length == 2: + data.reverse() + return data + + # The length of 24 bits is not supported yet + if length == 3: + raise SPSDKError("Unsupported length (3) for change endianness.") + + return reverse_bytes_in_longs(data) + + +class Timeout: + """Simple timeout handle class.""" + + UNITS = { + "s": 1000000, + "ms": 1000, + "us": 1, + } + + def __init__(self, timeout: int, units: str = "s") -> None: + """Simple timeout class constructor. + + :param timeout: Timeout value. + :param units: Timeout units (MUST be from the UNITS list) + :raises SPSDKValueError: Invalid input value. + """ + if units not in self.UNITS: + raise SPSDKValueError("Units are not in supported units.") + self.enabled = timeout != 0 + self.timeout_us = timeout * self.UNITS[units] + self.start_time_us = self._get_current_time_us() + self.end_time = self.start_time_us + self.timeout_us + self.units = units + + @staticmethod + def _get_current_time_us() -> int: + """Returns current system time in microseconds. + + :return: Current time in microseconds + """ + return ceil(time.time() * 1_000_000) + + def _convert_to_units(self, time_us: int) -> int: + """Converts time in us into used units. + + :param time_us: Time in micro seconds. + :return: Time in user units. + """ + return time_us // self.UNITS[self.units] + + def get_consumed_time(self) -> int: + """Returns consumed time since start of timeout operation. + + :return: Consumed time in units as the class was constructed + """ + return self._convert_to_units(self._get_current_time_us() - self.start_time_us) + + def get_consumed_time_ms(self) -> int: + """Returns consumed time since start of timeouted operation in milliseconds. + + :return: Consumed time in milliseconds + """ + return (self._get_current_time_us() - self.start_time_us) // 1000 + + def get_rest_time(self, raise_exc: bool = False) -> int: + """Returns rest time to timeout overflow. + + :param raise_exc: If set, the function raise SPSDKTimeoutError in case of overflow. + :return: Rest time in units as the class was constructed + :raises SPSDKTimeoutError: In case of overflow + """ + if self.enabled and self._get_current_time_us() > self.end_time and raise_exc: + raise SPSDKTimeoutError("Timeout of operation.") + + return ( + self._convert_to_units(self.end_time - self._get_current_time_us()) + if self.enabled + else 0 + ) + + def get_rest_time_ms(self, raise_exc: bool = False) -> int: + """Returns rest time to timeout overflow. + + :param raise_exc: If set, the function raise SPSDKTimeoutError in case of overflow. + :return: Rest time in milliseconds + :raises SPSDKTimeoutError: In case of overflow + """ + if self.enabled and self._get_current_time_us() > self.end_time and raise_exc: + raise SPSDKTimeoutError("Timeout of operation.") + + # pylint: disable=superfluous-parens # because PEP20: Readability counts + return ( + ((self.end_time - self._get_current_time_us()) // 1000) + if self.enabled + else 0 + ) + + def overflow(self, raise_exc: bool = False) -> bool: + """Check the the timer has been overflowed. + + :param raise_exc: If set, the function raise SPSDKTimeoutError in case of overflow. + :return: True if timeout overflowed, False otherwise. 
:raises SPSDKTimeoutError: In case of overflow + """ + overflow = self.enabled and self._get_current_time_us() > self.end_time + if overflow and raise_exc: + raise SPSDKTimeoutError("Timeout of operation.") + return overflow + + +def size_fmt(num: Union[float, int], use_kibibyte: bool = True) -> str: + """Size format.""" + base, suffix = [(1000.0, "B"), (1024.0, "iB")][use_kibibyte] + i = "B" + for i in ["B"] + [i + suffix for i in list("kMGTP")]: + if num < base: + break + num /= base + + return f"{int(num)} {i}" if i == "B" else f"{num:3.1f} {i}" + + +def numberify_version( + version: str, separator: str = ".", valid_numbers: int = 3 +) -> int: + """Turn version string into a number. + + Each group is weighted by a multiple of 1000 + + 1.2.3 -> 1 * 1_000_000 + 2 * 1_000 + 3 * 1 = 1_002_003 + 21.100.9 -> 21 * 1_000_000 + 100 * 1_000 + 9 * 1 = 21_100_009 + + :param version: Version string numbers separated by `separator` + :param separator: Separator used in the version string, defaults to "." + :param valid_numbers: Amount of numbers to sanitize to consider, defaults to 3 + :return: Number representing the version + """ + sanitized_version = sanitize_version( + version=version, separator=separator, valid_numbers=valid_numbers + ) + return int( + sum( + int(number) * math.pow(10, 3 * order) + for order, number in enumerate(reversed(sanitized_version.split(separator))) + ) + ) + + +def sanitize_version(version: str, separator: str = ".", valid_numbers: int = 3) -> str: + """Sanitize version string. + + Append '.0' in case version string has fewer parts than `valid_numbers` + Remove right-most version parts after `valid_numbers` amount of parts + + 1.2 -> 1.2.0 + 1.2.3.4 -> 1.2.3 + + :param version: Original version string + :param separator: Separator used in the version string, defaults to "." + :param valid_numbers: Amount of numbers to sanitize, defaults to 3 + :return: Sanitized version string + """ + version_parts = version.split(separator) + version_parts += ["0"] * (valid_numbers - len(version_parts)) + return separator.join(version_parts[:valid_numbers]) + + +def get_key_by_val(value: str, dictionary: Dict[str, List[str]]) -> str: + """Return key by its value. + + :param value: Value to find. + :param dictionary: Dictionary to find in. + :raises SPSDKValueError: Value is not present in dictionary. + :return: Key name + """ + for key, item in dictionary.items(): + if value.lower() in [x.lower() for x in item]: + return key + + raise SPSDKValueError(f"Value {value} is not in {dictionary}.") + + +def swap16(x: int) -> int: + """Swap bytes in half word (16bit). + + :param x: Original number + :return: Number with swapped bytes + :raises SPSDKError: When incorrect number to be swapped is provided + """ + if x < 0 or x > 0xFFFF: + raise SPSDKError("Incorrect number to be swapped") + return ((x << 8) & 0xFF00) | ((x >> 8) & 0x00FF) + + +def swap32(x: int) -> int: + """Swap 32 bit integer. + + :param x: integer to be swapped + :return: swapped value + :raises SPSDKError: When incorrect number to be swapped is provided + """ + if x < 0 or x > 0xFFFFFFFF: + raise SPSDKError("Incorrect number to be swapped") + return unpack("<I", pack(">I", x))[0] + + +def check_range(x: int, start: int = 0, end: int = (1 << 32) - 1) -> bool: + """Check if the number is in range. + + :param x: Number to check. + :param start: Lower border of range, default is 0. + :param end: Upper border of range, default is unsigned 32-bit range. + :return: True if fits, False otherwise.
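A few worked examples for the version and byte-order helpers above, assuming the vendored import path from this diff:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.utils.misc import (
    numberify_version,
    sanitize_version,
    size_fmt,
    swap16,
    swap32,
)

assert sanitize_version("1.2") == "1.2.0"
assert numberify_version("1.2.3") == 1_002_003
assert swap16(0x1234) == 0x3412
assert swap32(0x12345678) == 0x78563412
assert size_fmt(2048) == "2.0 kiB"
```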
+ """ + if start > x > end: + return False + + return True + + +def load_configuration(path: str, search_paths: Optional[List[str]] = None) -> Dict: + """Load configuration from yml/json file. + + :param path: Path to configuration file + :param search_paths: List of paths where to search for the file, defaults to None + :raises SPSDKError: When unsupported file is provided + :return: Content of configuration as dictionary + """ + try: + config = load_text(path, search_paths=search_paths) + except Exception as exc: + raise SPSDKError(f"Can't load configuration file: {str(exc)}") from exc + + try: + return json.loads(config) + except json.JSONDecodeError: + # import YAML only if needed to save startup time + from yaml import YAMLError, safe_load # pylint: disable=import-outside-toplevel + + try: + return safe_load(config) + except (YAMLError, UnicodeDecodeError): + pass + + raise SPSDKError(f"Unable to load '{path}'.") + + +def split_data( + data: Union[bytearray, bytes], size: int +) -> Generator[bytes, None, None]: + """Split data into chunks of size. + + :param bytearray data: array of bytes to be split + :param int size: size of splitted array + :return Generator[bytes]: splitted array + """ + for i in range(0, len(data), size): + yield data[i : i + size] + + +def get_hash(text: Union[str, bytes]) -> str: + """Returns hash of given text.""" + if isinstance(text, str): + text = text.encode("utf-8") + return hashlib.sha1(text).digest().hex()[:8] + + +def deep_update(d: Dict, u: Dict) -> Dict: + """Deep update nested dictionaries. + + :param d: Dictionary that will be updated + :param u: Dictionary with update information + :returns: Updated dictionary. + """ + for k, v in u.items(): + if isinstance(v, dict): + d[k] = deep_update(d.get(k, {}), v) + else: + d[k] = v + return d + + +def wrap_text(text: str, max_line: int = 100) -> str: + """Wrap text in SPSDK standard. + + Count with new lines in input string and do wrapping after that. 
+ + :param text: Text to wrap + :param max_line: Max line in output, defaults to 100 + :return: Wrapped text (added new lines characters on right places) + """ + lines = text.splitlines() + return "\n".join([textwrap.fill(text=line, width=max_line) for line in lines]) + + +TS = TypeVar("TS", bound="SingletonMeta") # pylint: disable=invalid-name + + +class SingletonMeta(type): + """Singleton metaclass.""" + + _instance = None + + def __call__(cls: Type[TS], *args: Any, **kwargs: Any) -> TS: # type: ignore + """Call dunder override.""" + if cls._instance is None: + instance = super().__call__(*args, **kwargs) + cls._instance = instance + return cls._instance diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/plugins.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/plugins.py new file mode 100644 index 00000000..0db7b0a6 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/plugins.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2023-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause +"""SPSDK plugins manager.""" + +import logging +import os +import sys +from importlib.machinery import ModuleSpec +from importlib.util import find_spec, module_from_spec, spec_from_file_location +from types import ModuleType +from typing import Dict, List, Optional + +import importlib_metadata + +from ..exceptions import SPSDKError, SPSDKTypeError +from ..utils.misc import SingletonMeta +from ..utils.spsdk_enum import SpsdkEnum + +logger = logging.getLogger(__name__) + + +class PluginType(SpsdkEnum): + """Contains commands tags.""" + + SIGNATURE_PROVIDER = (0, "spsdk.sp", "Signature provider") + DEVICE_INTERFACE = (1, "spsdk.device.interface", "Device interface") + DEBUG_PROBE = (2, "spsdk.debug_probe", "Debug Probe") + WPC_SERVICE = (3, "spsdk.wpc.service", "WPC Service") + + +class PluginsManager(metaclass=SingletonMeta): + """Plugin manager.""" + + def __init__(self) -> None: + """Plugin manager constructor.""" + self.plugins: Dict[str, ModuleType] = {} + + def load_from_entrypoints(self, group_name: Optional[str] = None) -> int: + """Load modules from given setuptools group. + + :param group_name: Entry point group to load plugins + + :return: The number of loaded plugins. + """ + if group_name is not None and not isinstance(group_name, str): + raise SPSDKTypeError("Group name must be of string type.") + group_names = ( + [group_name] + if group_name is not None + else [PluginType.get_label(tag) for tag in PluginType.tags()] + ) + + entry_points: List[importlib_metadata.EntryPoint] = [] + for group_name in group_names: + eps = importlib_metadata.entry_points(group=group_name) + entry_points.extend(eps) + + count = 0 + for ep in entry_points: + try: + plugin = ep.load() + except (ModuleNotFoundError, ImportError) as exc: + logger.warning(f"Module {ep.module} could not be loaded: {exc}") + continue + logger.info(f"Plugin {ep.name} has been loaded.") + self.register(plugin) + count += 1 + return count + + def load_from_source_file( + self, source_file: str, module_name: Optional[str] = None + ) -> None: + """Import Python source file directly. 
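`SingletonMeta` makes every class that uses it as a metaclass a process-wide singleton, which is what keeps the plugin registry below consistent across call sites. A minimal sketch, assuming the vendored import path from this diff:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.utils.plugins import PluginsManager

manager_a = PluginsManager()
manager_b = PluginsManager()
assert manager_a is manager_b  # both names refer to the same instance
```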
+ + :param source_file: Path to python source file: absolute or relative to cwd + :param module_name: Name for the new module, default is basename of the source file + :raises SPSDKError: If importing of source file failed + """ + name = module_name or os.path.splitext(os.path.basename(source_file))[0] + spec = spec_from_file_location(name=name, location=source_file) + if not spec: + raise SPSDKError( + f"Source '{source_file}' does not exist. Check if it is valid file path name" + ) + + module = self._import_module_spec(spec) + self.register(module) + + def load_from_module_name(self, module_name: str) -> None: + """Import Python module directly. + + :param module_name: Module name to be imported + :raises SPSDKError: If importing of source file failed + """ + spec = find_spec(name=module_name) + if not spec: + raise SPSDKError( + f"Source '{module_name}' does not exist.Check if it is valid file module name" + ) + module = self._import_module_spec(spec) + self.register(module) + + def _import_module_spec(self, spec: ModuleSpec) -> ModuleType: + """Import module from module specification. + + :param spec: Module specification + :return: Imported module type + """ + module = module_from_spec(spec) + try: + sys.modules[spec.name] = module + spec.loader.exec_module(module) # type: ignore + logger.debug(f"A module spec {spec.name} has been loaded.") + except Exception as e: + raise SPSDKError(f"Failed to load module spec {spec.name}: {e}") from e + return module + + def register(self, plugin: ModuleType) -> None: + """Register a plugin with the given name. + + :param plugin: Plugin as a module + """ + plugin_name = self.get_plugin_name(plugin) + if plugin_name in self.plugins: + logger.debug(f"Plugin {plugin_name} has been already registered.") + return + self.plugins[plugin_name] = plugin + logger.debug(f"A plugin {plugin_name} has been registered.") + + def get_plugin(self, name: str) -> Optional[ModuleType]: + """Return a plugin for the given name. + + :param name: Plugin name + :return: Plugin or None if plugin with name is not registered + """ + return self.plugins.get(name) + + def get_plugin_name(self, plugin: ModuleType) -> str: + """Get canonical name of plugin. + + :param plugin: Plugin as a module + :return: String with plugin name + """ + name = getattr(plugin, "__name__", None) + if name is None: + raise SPSDKError("Plugin name could not be determined.") + return name + + +def load_plugin_from_source(source: str, name: Optional[str] = None) -> None: + """Load plugin from source. 
+ + :param source: The source to be loaded + Accepted values: + - Path to source file + - Existing module name + - Existing entrypoint + :param name: Name for the new module/plugin + """ + manager = PluginsManager() + if name and name in manager.plugins: + logger.debug(f"Plugin {name} has been already registered.") + return + try: + return manager.load_from_source_file(source) + except SPSDKError: + pass + try: + manager.load_from_module_name(source) + return + except SPSDKError: + pass + try: + manager.load_from_entrypoints(source) + return + except SPSDKError: + pass + raise SPSDKError(f"Unable to load from source '{source}'.") diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/schema_validator.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/schema_validator.py new file mode 100644 index 00000000..269627db --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/schema_validator.py @@ -0,0 +1,771 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# Copyright 2021-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module for schema-based configuration validation.""" + +import copy +import io +import logging +import os +from collections import OrderedDict +from typing import Any, Callable, Dict, List, Optional, Union + +import fastjsonschema +from deepmerge import Merger, always_merger +from deepmerge.strategy.dict import DictStrategies +from deepmerge.strategy.list import ListStrategies +from deepmerge.strategy.set import SetStrategies +from ruamel.yaml import YAML +from ruamel.yaml.comments import CommentedMap as CMap +from ruamel.yaml.comments import CommentedSeq as CSeq + +SPSDK_YML_INDENT = 2 + +from ..exceptions import SPSDKError +from ..utils.misc import ( + find_dir, + find_file, + load_configuration, + value_to_int, + wrap_text, + write_file, +) +from ..utils.spsdk_enum import SpsdkEnum + +ENABLE_DEBUG = False + +logger = logging.getLogger(__name__) + + +def cmap_update(cmap: CMap, updater: CMap) -> None: + """Update CMap including comments. + + :param cmap: Original CMap to be updated. + :param updater: CMap updater. + """ + cmap.update(updater) + cmap.ca.items.update(updater.ca.items) + + +class PropertyRequired(SpsdkEnum): + """Enum describing if the property is required or optional.""" + + REQUIRED = (0, "REQUIRED", "Required") + CONDITIONALLY_REQUIRED = (1, "CONDITIONALLY_REQUIRED", "Conditionally required") + OPTIONAL = (2, "OPTIONAL", "Optional") + + +class SPSDKListStrategies(ListStrategies): + """Extended List Strategies.""" + + # pylint: disable=unused-argument # because of the base class + @staticmethod + def strategy_set(_config, _path, base, nxt): # type: ignore + """Use the set of both as a output.""" + try: + ret = list(set(base + nxt)) + ret.sort() + except TypeError: + try: + ret = base + nxt + except TypeError: + logger.warning( + "Found unhashable object in List 'set' strategy during merge." + " It was used 'override' method instead of 'set'." + ) + ret = nxt + return ret + + +class SPSDKMerger(Merger): + """Modified Merger to add new list strategy 'set'.""" + + PROVIDED_TYPE_STRATEGIES = { + list: SPSDKListStrategies, + dict: DictStrategies, + set: SetStrategies, + } + + +def _is_number(param: Any) -> bool: + """Checks whether the input represents a number. 
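The plugin manager above accepts any already-imported module via `register` and looks it up by its `__name__`. A minimal sketch, assuming the vendored import path from this diff and using the standard-library `json` module purely as a stand-in plugin:

```python
import json

from pynitrokey.trussed.bootloader.lpc55_upload.utils.plugins import PluginsManager

manager = PluginsManager()
manager.register(json)                     # any imported module can be registered
assert manager.get_plugin("json") is json  # looked up by the module's __name__
```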
+ + :param param: Input to analyze + :raises SPSDKError: Input doesn't represent a number + :return: True if input represents a number + """ + try: + value_to_int(param) + return True + except SPSDKError: + return False + + +def _is_hex_number(param: Any) -> bool: + """Checks whether the input represents a hexnumber. + + :param param: Input to analyze + :raises SPSDKError: Input doesn't represent a hexnumber + :return: True if input represents a hexnumber + """ + try: + bytes.fromhex(param) + return True + except (TypeError, ValueError): + return False + + +def _print_validation_fail_reason( + exc: fastjsonschema.JsonSchemaValueException, + extra_formatters: Optional[Dict[str, Callable[[str], bool]]] = None, +) -> str: + """Print formatted and easy to read reason why the validation failed. + + :param exc: Original exception. + :param extra_formatters: Additional custom formatters + :return: String explaining the reason of fail. + """ + + def process_nested_rule( + exception: fastjsonschema.JsonSchemaValueException, + extra_formatters: Optional[Dict[str, Callable[[str], bool]]], + ) -> str: + message = "" + for rule_def_ix, rule_def in enumerate(exception.rule_definition): + try: + validator = fastjsonschema.compile(rule_def, formats=extra_formatters) + validator(exception.value) + message += f"\nRule#{rule_def_ix} passed.\n" + except fastjsonschema.JsonSchemaValueException as _exc: + message += ( + f"\nReason of fail for {exception.rule} rule#{rule_def_ix}: " + f"\n {_print_validation_fail_reason(_exc , extra_formatters)}\n" + ) + if all(rule_def.get("required") for rule_def in exception.rule_definition): + message += f"\nYou need to define {exception.rule} of the following sets:" + for rule_def in exc.rule_definition: + message += f" {rule_def['required']}" + return message + + message = str(exc) + if exc.rule == "required": + missing = filter(lambda x: x not in exc.value.keys(), exc.rule_definition) + message += f"; Missing field(s): {', '.join(missing)}" + elif exc.rule == "format": + if exc.rule_definition == "file": + message += f"; Non-existing file: {exc.value}" + message += ( + "; The file must exists even if the key is NOT used in configuration." + ) + elif exc.rule == "anyOf": + message += process_nested_rule(exc, extra_formatters=extra_formatters) + elif exc.rule == "oneOf": + message += process_nested_rule(exc, extra_formatters=extra_formatters) + return message + + +def check_config( + config: Union[str, Dict[str, Any]], + schemas: List[Dict[str, Any]], + extra_formatters: Optional[Dict[str, Callable[[str], bool]]] = None, + search_paths: Optional[List[str]] = None, +) -> None: + """Check the configuration by provided list of validation schemas. 
+ + :param config: Configuration to check + :param schemas: List of validation schemas + :param extra_formatters: Additional custom formatters + :param search_paths: List of paths where to search for the file, defaults to None + :raises SPSDKError: Invalid validation schema or configuration + """ + custom_formatters: Dict[str, Callable[[str], bool]] = { + "dir": lambda x: bool(find_dir(x, search_paths=search_paths, raise_exc=False)), + "file": lambda x: bool( + find_file(x, search_paths=search_paths, raise_exc=False) + ), + "file_name": lambda x: os.path.basename(x.replace("\\", "/")) not in ("", None), + "optional_file": lambda x: not x + or bool(find_file(x, search_paths=search_paths, raise_exc=False)), + "number": _is_number, + "hex_value": _is_hex_number, + } + if isinstance(config, str): + config_to_check = load_configuration(config) + config_dir = os.path.dirname(config) + if search_paths: + search_paths.append(config_dir) + else: + search_paths = [config_dir] + else: + config_to_check = copy.deepcopy(config) + + schema: Dict[str, Any] = {} + for sch in schemas: + always_merger.merge(schema, copy.deepcopy(sch)) + validator = None + formats = always_merger.merge(custom_formatters, extra_formatters or {}) + try: + if ENABLE_DEBUG: + validator_code = fastjsonschema.compile_to_code(schema, formats=formats) + write_file(validator_code, "validator_file.py") + else: + validator = fastjsonschema.compile(schema, formats=formats) + except (TypeError, fastjsonschema.JsonSchemaDefinitionException) as exc: + raise SPSDKError( + f"Invalid validation schema to check config: {str(exc)}" + ) from exc + try: + if ENABLE_DEBUG: + import validator_file # type: ignore + + validator_file.validate(config_to_check, formats) + else: + assert validator is not None + validator(config_to_check) + except fastjsonschema.JsonSchemaValueException as exc: + message = _print_validation_fail_reason(exc, formats) + raise SPSDKError(f"Configuration validation failed: {message}") from exc + + +class CommentedConfig: + """Class for generating commented config templates or custom configurations.""" + + MAX_LINE_LENGTH = 120 - 2 # Minus '# ' + + def __init__( + self, + main_title: str, + schemas: List[Dict[str, Any]], + note: Optional[str] = None, + ): + """Constructor for Config templates. + + :param main_title: Main title of final template. + :param schemas: Main description of final template. + :param note: Additional Note after title test. + """ + self.main_title = main_title + self.schemas = schemas + self.indent = 0 + self.note = note + self.creating_configuration = False + + @property + def max_line(self) -> int: + """Maximal line with current indent.""" + return self.MAX_LINE_LENGTH - max(SPSDK_YML_INDENT * (self.indent - 1), 0) + + def _get_title_block(self, title: str, description: Optional[str] = None) -> str: + """Get unified title blob. 
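`check_config` merges the supplied schemas and validates the configuration with `fastjsonschema`, including the custom formats registered above. A minimal sketch, assuming the vendored import paths from this diff; the schema and its values are made-up examples:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.exceptions import SPSDKError
from pynitrokey.trussed.bootloader.lpc55_upload.utils.schema_validator import check_config

schema = {
    "type": "object",
    "properties": {
        "family": {"type": "string"},
        "address": {"type": "string", "format": "hex_value"},
    },
    "required": ["family"],
}

check_config({"family": "lpc55s6x", "address": "0004"}, [schema])  # passes

try:
    check_config({"address": "0004"}, [schema])  # missing required "family"
except SPSDKError as exc:
    print(exc)
```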
+ + :param title: Simple title of block + :param description: Description of block + :return: ASCII art block + """ + delimiter = "=" * self.max_line + title_str = f" == {title} == " + title_str = title_str.center(self.max_line) + + ret = delimiter + "\n" + title_str + "\n" + if description: + wrapped_description = wrap_text(description, self.max_line) + lines = wrapped_description.splitlines() + ret += "\n".join([line.center(self.max_line) for line in lines]) + ret += "\n" + ret += delimiter + return ret + + @staticmethod + def get_property_optional_required( + key: str, block: Dict[str, Any] + ) -> PropertyRequired: + """Function to determine if the config property is required or not. + + :param key: Name of config record + :param block: Source data block + :return: Final description. + """ + schema_kws = ["allOf", "anyOf", "oneOf", "if", "then", "else"] + + def _find_required(d_in: Dict[str, Any]) -> Optional[List[str]]: + if "required" in d_in: + return d_in["required"] + + for d_v in d_in.values(): + if isinstance(d_v, dict): + ret = _find_required(d_v) + if ret: + return ret + return None + + def _find_required_in_schema_kws( + schema_node: Union[List, Dict[str, Any]] + ) -> List[str]: + """Find all required properties in structure composed of nested properties.""" + all_props: List[str] = [] + if isinstance(schema_node, dict): + for k, v in schema_node.items(): + if k == "required": + all_props.extend(v) + elif k in schema_kws: + req_props = _find_required_in_schema_kws(v) + all_props.extend(req_props) + if isinstance(schema_node, list): + for item in schema_node: + req_props = _find_required_in_schema_kws(item) + all_props.extend(req_props) + return list(set(all_props)) + + if "required" in block and key in block["required"]: + return PropertyRequired.REQUIRED + + for val in block.values(): + if isinstance(val, dict): + ret = _find_required(val) + if ret and key in ret: + return PropertyRequired.CONDITIONALLY_REQUIRED + + actual_kws = {k: v for k, v in block.items() if k in schema_kws} + ret = _find_required_in_schema_kws(actual_kws) + if key in ret: + return PropertyRequired.CONDITIONALLY_REQUIRED + + return PropertyRequired.OPTIONAL + + def _create_object_block( + self, + block: Dict[str, Dict[str, Any]], + custom_value: Optional[Union[Dict[str, Any], List[Any]]] = None, + ) -> CMap: + """Private function used to create object block with data. + + :param block: Source block with data + :param custom_value: + Optional dictionary or List of properties to be exported. + It is recommended to pass OrderedDict to preserve the key order. + - key is property ID to be exported + - value is its value; or None if default value shall be used + :return: CMap or CSeq base configuration object + :raises SPSDKError: In case of invalid data pattern. + """ + assert block.get("type") == "object" + self.indent += 1 + + assert "properties" in block.keys() + + cfg_m = CMap() + for key in self._get_schema_block_keys(block): + assert ( + key in block["properties"].keys() + ), f"Missing key ({key}, in block properties. 
Block title: {block.get('title', 'Unknown')})" + + # Skip the record in case that custom value key is defined, + # but it has None value as a mark to not use this record + value = custom_value.get(key, None) if custom_value else None # type: ignore + if custom_value and value is None: + continue + + val_p: Dict = block["properties"][key] + value_to_add = self._get_schema_value(val_p, value) + if value_to_add is None: + raise SPSDKError(f"Cannot create the value for {key}") + + cfg_m[key] = value_to_add + required = self.get_property_optional_required(key, block).description + assert required + self._add_comment( + cfg_m, + val_p, + key, + value_to_add, + required, + ) + + self.indent -= 1 + return cfg_m + + def _create_array_block( + self, block: Dict[str, Dict[str, Any]], custom_value: Optional[List[Any]] + ) -> CSeq: + """Private function used to create array block with data. + + :param block: Source block with data + :return: CS base configuration object + :raises SPSDKError: In case of invalid data pattern. + """ + assert block.get("type") == "array" + assert "items" in block.keys() + self.indent += 1 + val_i: Dict = block["items"] + + cfg_s = CSeq() + if custom_value is not None: + for cust_val in custom_value: + value = self._get_schema_value(val_i, cust_val) + if isinstance(value, (CSeq, List)): + cfg_s.extend(value) + else: + cfg_s.append(value) + else: + value = self._get_schema_value(val_i, None) + # the template_value can be the actual list(not only one element) + if isinstance(value, (CSeq, List)): + cfg_s.extend(value) + else: + cfg_s.append(value) + self.indent -= 1 + return cfg_s + + @staticmethod + def _check_matching_oneof_option(one_of: Dict[str, Any], cust_val: Any) -> bool: + """Find matching given custom value to "oneOf" schema. + + :param one_of:oneOf schema + :param cust_val: custom value + :raises SPSDKError: if not found + """ + + def check_type(option: Dict, t: str) -> bool: + option_type = option.get("type") + if isinstance(option_type, list): + return t in option_type + return t == option_type + + if cust_val: + if isinstance(cust_val, dict) and check_type(one_of, "object"): + properties = one_of.get("properties") + assert properties, "non-empty properties must be defined" + if all([key in properties for key in cust_val.keys()]): + return True + + if isinstance(cust_val, str) and check_type(one_of, "string"): + return True + + if isinstance(cust_val, int) and check_type(one_of, "number"): + return True + + return False + + def _handle_one_of_block( + self, + block: Dict[str, Any], + custom_value: Optional[Union[Dict[str, Any], List[Any]]] = None, + ) -> CMap: + """Private function used to create oneOf block with data, and return as an array that contains all values. 
+ + :param block: Source block with data + :param custom_value: custom value to fill the array + :return: CS base configuration object + """ + + def get_help_name(schema: Dict) -> str: + if schema.get("type") == "object": + options = list(schema["properties"].keys()) + if len(options) == 1: + return options[0] + return str(options) + return str(schema.get("title", schema.get("type", "Unknown"))) + + ret = CMap() + one_of = block + assert isinstance(one_of, list) + if custom_value is not None: + for i, one_option in enumerate(one_of): + if not self._check_matching_oneof_option(one_option, custom_value): + continue + return self._get_schema_value(one_option, custom_value) + raise SPSDKError("Any allowed option matching the configuration data") + + # Check the restriction into templates in oneOf block + one_of_mod = [] + for x in one_of: + skip = x.get("skip_in_template", False) + if not skip: + one_of_mod.append(x) + + # In case that only one oneOf option left just return simple value + if len(one_of_mod) == 1: + return self._get_schema_value(one_of_mod[0], custom_value) + + option_types = ", ".join([get_help_name(x) for x in one_of_mod]) + title = f"List of possible {len(one_of_mod)} options." + for i, option in enumerate(one_of_mod): + if option.get("type") != "object": + continue + value = self._get_schema_value(option, None) + assert isinstance(value, CMap) + cmap_update(ret, value) + + key = list(value.keys())[0] + comment = "" + if i == 0: + comment = ( + self._get_title_block(title, f"Options [{option_types}]") + "\n" + ) + comment += "\n " + ( + f" [Example of possible configuration #{i}] ".center(self.max_line, "=") + ) + self._update_before_comment(cfg=ret, key=key, comment=comment) + return ret + + def _get_schema_value( + self, block: Dict[str, Any], custom_value: Any + ) -> Union[CMap, CSeq, str, int, float, List]: + """Private function used to fill up configuration block with data. + + :param block: Source block with data + :param custom_value: value to be saved instead of default value + :return: CM/CS base configuration object with comment + :raises SPSDKError: In case of invalid data pattern. + """ + + def get_custom_or_template() -> Any: + assert ( + custom_value or "template_value" in block.keys() + ), f"Template value not provided in {block}" + return ( + custom_value + if (custom_value is not None) + else block.get("template_value", "Unknown") + ) + + ret: Optional[Union[CMap, CSeq, str, int, float]] = None + if "oneOf" in block and not "properties" in block: + ret = self._handle_one_of_block(block["oneOf"], custom_value) + if not ret: + ret = get_custom_or_template() + else: + schema_type = block.get("type") + if not schema_type: + raise SPSDKError(f"Type not available in block: {block}") + assert schema_type, f"Type not available in block: {block}" + + if schema_type == "object": + assert (custom_value is None) or isinstance(custom_value, dict) + ret = self._create_object_block(block, custom_value) + elif schema_type == "array": + assert (custom_value is None) or isinstance(custom_value, list) + ret = self._create_array_block(block, custom_value) + else: + ret = get_custom_or_template() + + assert isinstance(ret, (CMap, CSeq, str, int, float, list)) + + return ret + + def _add_comment( + self, + cfg: Union[CMap, CSeq], + schema: Dict[str, Any], + key: Union[str, int], + value: Optional[Union[CMap, CSeq, str, int, float, List]], + required: str, + ) -> None: + """Private function used to create comment for block. 
+ + :param cfg: Target configuration where the comment should be stored + :param schema: Object configuration JSON SCHEMA + :param key: Config key + :param value: Value of config key + :param required: Required text description + """ + value_len = len(str(key) + ": ") + if value and isinstance(value, (str, int)): + value_len += len(str(value)) + template_title = schema.get("template_title") + title = schema.get("title", "") + descr = schema.get("description", "") + enum_list = schema.get("enum", schema.get("enum_template", [])) + enum = "" + + if len(enum_list): + enum = "Possible options: <" + ", ".join([str(x) for x in enum_list]) + ">" + if title: + # one_line_comment = ( + # f"[{required}] {title}{'; ' if descr else ''}{descr}{';'+enum if enum else ''}" + # ) + # TODO This feature will be disabled since the issue + # https://sourceforge.net/p/ruamel-yaml/tickets/475/ will be solved + # if True: # len(one_line_comment) > self.max_line - value_len: + # Too long comment split it into comment block + comment = f"===== {title} [{required}] =====".center(self.max_line, "-") + if descr: + comment += wrap_text("\nDescription: " + descr, max_line=self.max_line) + if enum: + comment += wrap_text("\n" + enum, max_line=self.max_line) + cfg.yaml_set_comment_before_after_key( + key, comment, indent=SPSDK_YML_INDENT * (self.indent - 1) + ) + # else: + # cfg.yaml_add_eol_comment( + # one_line_comment, + # key=key, + # column=SPSDK_YML_INDENT * (self.indent - 1), + # ) + + if template_title: + self._update_before_comment( + cfg, key, "\n" + self._get_title_block(template_title) + ) + + @staticmethod + def _get_schema_block_keys(schema: Dict[str, Dict[str, Any]]) -> List[str]: + """Creates list of property keys in given schema. + + :param schema: Input schema piece. + :return: List of all property keys. + """ + if "properties" not in schema: + return [] + return [ + key + for key in schema["properties"] + if schema["properties"][key].get("skip_in_template", False) == False + ] + + def _update_before_comment( + self, cfg: Union[CMap, CSeq], key: Union[str, int], comment: str + ) -> None: + """Update comment to add new comment before current one. + + :param sfg: Commented map / Commented Sequence + :param key: Key name + :param comment: Comment that should be place before current one. + """ + from ruamel.yaml.error import CommentMark + from ruamel.yaml.tokens import CommentToken + + def comment_token(s: str, mark: CommentMark) -> CommentToken: + # handle empty lines as having no comment + return CommentToken(("# " if s else "") + s + "\n", mark) + + comments = cfg.ca.items.setdefault(key, [None, None, None, None]) + if not isinstance(comments[1], list): + comments[1] = [] + new_lines = comment.splitlines() + new_lines.reverse() + start_mark = CommentMark(SPSDK_YML_INDENT * (self.indent - 1)) + for c in new_lines: + comments[1].insert(0, comment_token(c, start_mark)) + + def export(self, config: Optional[Dict[str, Any]] = None) -> CMap: + """Export configuration template into CommentedMap. + + :param config: Configuration to be applied to template. + :raises SPSDKError: Error + :return: Configuration template in CM. + """ + self.indent = 0 + self.creating_configuration = bool(config) + loc_schemas = copy.deepcopy(self.schemas) + # 1. 
Get blocks with their titles and lists of their keys + block_list: Dict[str, Any] = {} + for schema in loc_schemas: + if schema.get("skip_in_template", False): + continue + title = schema.get("title", "General Options") + if title in block_list: + property_list = block_list[title]["properties"] + assert isinstance(property_list, list) + property_list.extend( + [ + x + for x in self._get_schema_block_keys(schema) + if x not in block_list[title]["properties"] + ] + ) + else: + block_list[title] = {} + block_list[title]["properties"] = self._get_schema_block_keys(schema) + block_list[title]["description"] = schema.get("description", "") + + # 2. Merge all schemas together to get whole single schema + schemas_merger = SPSDKMerger( + [(list, ["set"]), (dict, ["merge"]), (set, ["union"])], + ["override"], + ["override"], + ) + + merged: Dict[str, Any] = {} + for schema in loc_schemas: + schemas_merger.merge(merged, copy.deepcopy(schema)) + + # 3. Create order of individual settings + + order_dict: Dict[str, Any] = OrderedDict() + properties_for_template = self._get_schema_block_keys(merged) + for block in block_list.values(): + block_properties: list = block["properties"] + # block_properties.sort() + for block_property in block_properties: + if block_property in properties_for_template: + order_dict[block_property] = merged["properties"][block_property] + merged["properties"] = order_dict + + try: + self.indent = 0 + # 4. Go through all individual logic blocks + cfg = self._create_object_block(merged, config) + assert isinstance(cfg, CMap) + # 5. Add main title of configuration + title = f" {self.main_title} ".center(self.MAX_LINE_LENGTH, "=") + "\n\n" + if self.note: + title += f"\n{' Note '.center(self.MAX_LINE_LENGTH, '-')}\n" + title += wrap_text(self.note, self.max_line) + "\n" + cfg.yaml_set_start_comment(title) + for title, info in block_list.items(): + description = info["description"] + assert isinstance(description, str) or description is None + + first_key = None + for info_key in info["properties"]: + if info_key in cfg.keys(): + first_key = info_key + break + + if first_key: + self._update_before_comment( + cfg, first_key, self._get_title_block(title, description) + ) + + self.creating_configuration = False + return cfg + + except Exception as exc: + self.creating_configuration = False + raise SPSDKError(f"Template generation failed: {str(exc)}") from exc + + def get_template(self) -> str: + """Export Configuration template directly into YAML string format. + + :return: YAML string. + """ + return self.convert_cm_to_yaml(self.export()) + + def get_config(self, config: Dict[str, Any]) -> str: + """Export Configuration directly into YAML string format. + + :return: YAML string. + """ + return self.convert_cm_to_yaml(self.export(config)) + + @staticmethod + def convert_cm_to_yaml(config: CMap) -> str: + """Convert Commented Map for into final YAML string. + + :param config: Configuration in CM format. + :raises SPSDKError: If configuration is empty + :return: YAML string with configuration to use to store in file. 
+ """ + if not config: + raise SPSDKError("Configuration cannot be empty") + yaml = YAML(pure=True) + yaml.indent(sequence=SPSDK_YML_INDENT * 2, offset=SPSDK_YML_INDENT) + stream = io.StringIO() + yaml.dump(config, stream) + yaml_data = stream.getvalue() + + return yaml_data diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/spsdk_enum.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/spsdk_enum.py new file mode 100644 index 00000000..4d1870b8 --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/spsdk_enum.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2023 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Custom enum extension.""" +from dataclasses import dataclass +from enum import Enum +from typing import Callable, List, Optional, Union + +from typing_extensions import Self + +from ..exceptions import SPSDKKeyError, SPSDKTypeError + + +@dataclass(frozen=True) +class SpsdkEnumMember: + """SPSDK Enum member.""" + + tag: int + label: str + description: Optional[str] = None + + +class SpsdkEnum(SpsdkEnumMember, Enum): + """SPSDK Enum type.""" + + def __eq__(self, __value: object) -> bool: + return self.tag == __value or self.label == __value + + def __hash__(self) -> int: + return hash((self.tag, self.label, self.description)) + + @classmethod + def labels(cls) -> List[str]: + """Get list of labels of all enum members. + + :return: List of all labels + """ + return [value.label for value in cls.__members__.values()] + + @classmethod + def tags(cls) -> List[int]: + """Get list of tags of all enum members. + + :return: List of all tags + """ + return [value.tag for value in cls.__members__.values()] + + @classmethod + def contains(cls, obj: Union[int, str]) -> bool: + """Check if given member with given tag/label exists in enum. + + :param obj: Label or tag of enum + :return: True if exists False otherwise + """ + if not isinstance(obj, (int, str)): + raise SPSDKTypeError("Object must be either string or integer") + try: + cls.from_attr(obj) + return True + except SPSDKKeyError: + return False + + @classmethod + def get_tag(cls, label: str) -> int: + """Get tag of enum member with given label. + + :param label: Label to be used for searching + :return: Tag of found enum member + """ + value = cls.from_label(label) + return value.tag + + @classmethod + def get_label(cls, tag: int) -> str: + """Get label of enum member with given tag. + + :param tag: Tag to be used for searching + :return: Label of found enum member + """ + value = cls.from_tag(tag) + return value.label + + @classmethod + def get_description(cls, tag: int, default: Optional[str] = None) -> Optional[str]: + """Get description of enum member with given tag. + + :param tag: Tag to be used for searching + :param default: Default value if member contains no description + :return: Description of found enum member + """ + value = cls.from_tag(tag) + return value.description or default + + @classmethod + def from_attr(cls, attribute: Union[int, str]) -> Self: + """Get enum member with given tag/label attribute. + + :param attribute: Attribute value of enum member + :return: Found enum member + """ + # Let's make MyPy happy, see https://github.com/python/mypy/issues/10740 + if isinstance(attribute, int): + return cls.from_tag(attribute) + else: + return cls.from_label(attribute) + + @classmethod + def from_tag(cls, tag: int) -> Self: + """Get enum member with given tag. 
+ + :param tag: Tag to be used for searching + :raises SPSDKKeyError: If enum with given label is not found + :return: Found enum member + """ + for item in cls.__members__.values(): + if item.tag == tag: + return item + raise SPSDKKeyError( + f"There is no {cls.__name__} item in with tag {tag} defined" + ) + + @classmethod + def from_label(cls, label: str) -> Self: + """Get enum member with given label. + + :param label: Label to be used for searching + :raises SPSDKKeyError: If enum with given label is not found + :return: Found enum member + """ + for item in cls.__members__.values(): + if item.label.upper() == label.upper(): + return item + raise SPSDKKeyError( + f"There is no {cls.__name__} item with label {label} defined" + ) + + +class SpsdkSoftEnum(SpsdkEnum): + """SPSDK Soft Enum type. + + It has API with default values for labels and + descriptions with defaults for non existing members. + """ + + @classmethod + def get_label(cls, tag: int) -> str: + """Get label of enum member with given tag. + + If member not found and default is specified, the default is returned. + + :param tag: Tag to be used for searching + :return: Label of found enum member + """ + try: + return super().get_label(tag) + except SPSDKKeyError: + return f"Unknown ({tag})" + + @classmethod + def get_description(cls, tag: int, default: Optional[str] = None) -> Optional[str]: + """Get description of enum member with given tag. + + :param tag: Tag to be used for searching + :param default: Default value if member contains no description + :return: Description of found enum member + """ + try: + return super().get_description(tag, default) + except SPSDKKeyError: + return f"Unknown ({tag})" diff --git a/pynitrokey/trussed/bootloader/lpc55_upload/utils/usbfilter.py b/pynitrokey/trussed/bootloader/lpc55_upload/utils/usbfilter.py new file mode 100644 index 00000000..86e82f5c --- /dev/null +++ b/pynitrokey/trussed/bootloader/lpc55_upload/utils/usbfilter.py @@ -0,0 +1,296 @@ +#!/usr/bin/env python +# -*- coding: UTF-8 -*- +# +# Copyright 2019-2024 NXP +# +# SPDX-License-Identifier: BSD-3-Clause + +"""Module defining a USB filtering class.""" +import platform +import re +from typing import Any, Dict, Optional, Tuple + +from .misc import get_hash + + +class USBDeviceFilter: + """Generic USB Device Filtering class. + + Create a filtering instance. This instance holds the USB ID you are interested + in during USB HID device search and allows you to compare, whether + provided USB HID object is the one you are interested in. + The allowed format of `usb_id` string is following: + + vid or pid - vendor ID or product ID. String holding hex or dec number. + Hex number must be preceded by 0x or 0X. Number of characters after 0x is + 1 - 4. Mixed upper & lower case letters is allowed. e.g. "0xaB12", "0XAB12", + "0x1", "0x0001". + The decimal number is restricted only to have 1 - 5 digits, e.g. "65535" + It's allowed to set the USB filter ID to decimal number "99999", however, as + the USB VID number is four-byte hex number (max value is 65535), this will + lead to zero results. Leading zeros are not allowed e.g. 0001. This will + result as invalid match. + + The user may provide a single number as usb_id. In such a case the number + may represent either VID or PID. By default, the filter expects this number + to be a VID. In rare cases the user may want to filter based on PID. + Initialize the `search_by_pid` parameter to True in such cases. 
+ + vid/pid - string of vendor ID & product ID separated by ':' or ',' + Same rules apply to the number format as in VID case, except, that the + string consists of two numbers separated by ':' or ','. It's not allowed + to mix hex and dec numbers, e.g. "0xab12:12345" is not allowed. + Valid vid/pid strings: + "0x12aB:0xabc", "1,99999" + + Windows specific: + instance ID - String in following format "HID\\VID_&PID_\\", + see instance ID in device manager under Windows OS. + + Linux specific: + USB device path - HID API returns path in following form: + '0003:0002:00' + + The first number represents the Bus, the second Device and the third interface. The Bus:Device + number is unique so interface is not necessary and Bus:Device should be sufficient. + + The Bus:Device can be observed using 'lsusb' command. The interface can be observed using + 'lsusb -t'. lsusb returns the Bus and Device as a 3-digit number. + It has been agreed, that the expected input is: + #, e.g. 3#11 + + Mac specific: + USB device path - HID API returns path in roughly following form: + 'IOService:/AppleACPIPlatformExpert/PCI0@0/AppleACPIPCI/XHC1@14/XHC1@14000000/HS01@14100000/SE + Blank RT Family @14100000/IOUSBHostInterface@0/AppleUserUSBHostHIDDevice' + + This path can be found using the 'ioreg' utility or using 'IO Hardware Registry Explorer' tool. + However, using the system report from 'About This MAC -> System Report -> USB' a partial path + can also be gathered. Using the name of USB device from the 'USB Device Tree' and appending + the 'Location ID' should work. The name can be 'SE Blank RT Family' and the 'Location ID' is + in form / , e.g. '0x14200000 / 18'. + So the 'usb_id' name should be 'SE Blank RT Family @14200000' and the filter should be able to + filter out such device. + """ + + def __init__( + self, + usb_id: Optional[str] = None, + search_by_pid: bool = False, + ): + """Initialize the USB Device Filtering. + + :param usb_id: usb_id string + :param search_by_pid: if true, expects usb_id to be a PID number, VID otherwise. + """ + self.usb_id = usb_id + self.search_by_pid = search_by_pid + + def compare(self, usb_device_object: Dict[str, Any]) -> bool: + """Compares the internal `usb_id` with provided `usb_device_object`. + + The provided USB ID during initialization may be VID or PID, VID/PID pair, + or a path. See private methods for details. 
+ + :param usb_device_object: Libusbsio/HID_API device object (dictionary) + + :return: True on match, False otherwise + """ + # Determine, whether given device matches one of the expected criterion + if self.usb_id is None: + return True + + vendor_id = usb_device_object.get("vendor_id") + product_id = usb_device_object.get("product_id") + serial_number = usb_device_object.get("serial_number") + device_name = usb_device_object.get("device_name") + # the Libusbsio/HID_API holds the path as bytes, so we convert it to string + usb_path_raw = usb_device_object.get("path") + + if usb_path_raw: + if self.usb_id == get_hash(usb_path_raw): + return True + usb_path = self.convert_usb_path(usb_path_raw) + if self._is_path(usb_path=usb_path): + return True + + if self._is_vid_or_pid(vid=vendor_id, pid=product_id): + return True + + if vendor_id and product_id and self._is_vid_pid(vid=vendor_id, pid=product_id): + return True + + if serial_number and self.usb_id.casefold() == serial_number.casefold(): + return True + + if device_name and self.usb_id.casefold() == device_name.casefold(): + return True + + return False + + def _is_path(self, usb_path: str) -> bool: + """Compares the internal usb_id with provided path. + + If the path is a substring of the usb_id, this is considered as a match + and True is returned. + + :param usb_path: path to be compared with usd_id. + :return: true on a match, false otherwise. + """ + # we check the len of usb_id, because usb_id = "" is considered + # to be always in the string returning True, which is not expected + # behavior + # the provided usb string id fully matches the instance ID + usb_id = self.usb_id or "" + if usb_id.casefold() in usb_path.casefold() and len(usb_id) > 0: + return True + + return False + + def _is_vid_or_pid(self, vid: Optional[int], pid: Optional[int]) -> bool: + # match anything starting with 0x or 0X followed by 0-9 or a-f or + # match either 0 or decimal number not starting with zero + # this regex is the same for vid and pid => xid + xid_regex = "0[xX][0-9a-fA-F]{1,4}|0|[1-9][0-9]{0,4}" + usb_id = self.usb_id or "" + if re.fullmatch(xid_regex, usb_id) is not None: + # the string corresponds to the vid/pid specification, check a match + if self.search_by_pid and pid: + if int(usb_id, 0) == pid: + return True + elif vid: + if int(usb_id, 0) == vid: + return True + + return False + + def _is_vid_pid(self, vid: int, pid: int) -> bool: + """If usb_id corresponds to VID/PID pair, compares it with provided vid/pid. + + :param vid: vendor ID to compare. + :param pid: product ID to compare. + :return: true on a match, false otherwise. + """ + # match anything starting with 0x or 0X followed by 0-9 or a-f or + # match either 0 or decimal number not starting with zero + # Above pattern is combined to match a pair corresponding to vid/pid. + vid_pid_regex = "0[xX][0-9a-fA-F]{1,4}(,|:)0[xX][0-9a-fA-F]{1,4}|(0|[1-9][0-9]{0,4})(,|:)(0|[1-9][0-9]{0,4})" + usb_id = self.usb_id or "" + if re.fullmatch(vid_pid_regex, usb_id): + # the string corresponds to the vid/pid specification, check a match + vid_pid = re.split(":|,", usb_id) + if vid == int(vid_pid[0], 0) and pid == int(vid_pid[1], 0): + return True + + return False + + @staticmethod + def convert_usb_path(hid_api_usb_path: bytes) -> str: + """Converts the Libusbsio/HID_API path into string, which can be observed from OS. + + DESIGN REMARK: this function is not part of the USBLogicalDevice, as the + class intention is to be just a simple container. 
But to help the class + to get the required inputs, this helper method has been provided. Additionally, + this method relies on the fact that the provided path comes from the Libusbsio/HID_API. + This method will most probably fail or provide improper results in case + path from different USB API is provided. + + :param hid_api_usb_path: USB device path from Libusbsio/HID_API + :return: Libusbsio/HID_API path converted for given platform + """ + if platform.system() == "Windows": + device_manager_path = hid_api_usb_path.decode("utf-8").upper() + device_manager_path = device_manager_path.replace("#", "\\") + result = re.search(r"\\\\\?\\(.+?)\\{", device_manager_path) + if result: + device_manager_path = result.group(1) + + return device_manager_path + + if platform.system() == "Linux": + # we expect the path in form of #, Libusbsio/HID_API returns + # :: + linux_path = hid_api_usb_path.decode("utf-8") + linux_path_parts = linux_path.split(":") + + if len(linux_path_parts) > 1: + bus_num, port_chain = linux_path.split('-') + fs_path = linux_path.split(":")[0] + path = f"/sys/bus/usb/devices/{fs_path}/devnum" + devid = open(path).read() + linux_path = f"{bus_num}#{devid}" + + return linux_path + + if platform.system() == "Darwin": + return hid_api_usb_path.decode("utf-8") + + return "" + + +class NXPUSBDeviceFilter(USBDeviceFilter): + """NXP Device Filtering class. + + Extension of the generic USB device filter class to support filtering + based on NXP devices. Modifies the way, how single number is handled. + By default, if single value is provided, it's content is expected to be VID. + However, legacy tooling were expecting PID, so from this perspective if + a single number is provided, we expect that VID is out of range NXP_VIDS. + """ + + NXP_VIDS = [0x1FC9, 0x15A2, 0x0471, 0x0D28] + + def __init__( + self, + usb_id: Optional[str] = None, + nxp_device_names: Optional[Dict[str, Tuple[int, int]]] = None, + ): + """Initialize the USB Device Filtering. + + :param usb_id: usb_id string + :param nxp_device_names: Dictionary holding NXP device vid/pid {"device_name": [vid(int), pid(int)]} + """ + super().__init__(usb_id=usb_id, search_by_pid=True) + self.nxp_device_names = nxp_device_names or {} + + def compare(self, usb_device_object: Any) -> bool: + """Compares the internal `usb_id` with provided `usb_device_object`. + + Extends the comparison by USB names - dictionary of device name and + corresponding VID/PID. 
+ + :param usb_device_object: lpcusbsio USB HID device object + + :return: True on match, False otherwise + """ + vendor_id = usb_device_object["vendor_id"] + product_id = usb_device_object["product_id"] + + if self.usb_id: + if super().compare(usb_device_object=usb_device_object): + return True + + return self._is_nxp_device_name(vendor_id, product_id) + + return self._is_nxp_device(vendor_id) + + def _is_vid_or_pid(self, vid: Optional[int], pid: Optional[int]) -> bool: + if vid and vid in NXPUSBDeviceFilter.NXP_VIDS: + return super()._is_vid_or_pid(vid, pid) + + return False + + def _is_nxp_device_name(self, vid: int, pid: int) -> bool: + nxp_device_name_to_compare = { + k.lower(): v for k, v in self.nxp_device_names.items() + } + assert isinstance(self.usb_id, str) + if self.usb_id.lower() in nxp_device_name_to_compare: + vendor_id, product_id = nxp_device_name_to_compare[self.usb_id.lower()] + if vendor_id == vid and product_id == pid: + return True + return False + + @staticmethod + def _is_nxp_device(vid: int) -> bool: + return vid in NXPUSBDeviceFilter.NXP_VIDS diff --git a/pynitrokey/trussed/utils.py b/pynitrokey/trussed/utils.py index a4b0cc69..d09818be 100644 --- a/pynitrokey/trussed/utils.py +++ b/pynitrokey/trussed/utils.py @@ -12,8 +12,6 @@ from functools import total_ordering from typing import Optional, Sequence -from spsdk.sbfile.misc import BcdVersion3 - @dataclass(order=True, frozen=True) class Uuid: @@ -228,10 +226,6 @@ def from_v_str(cls, s: str) -> "Version": raise ValueError(f"Missing v prefix for firmware version: {s}") return Version.from_str(s[1:]) - @classmethod - def from_bcd_version(cls, version: BcdVersion3) -> "Version": - return cls(major=version.major, minor=version.minor, patch=version.service) - @dataclass class Fido2Certs: diff --git a/pyproject.toml b/pyproject.toml index 67b09f5e..b67e73ee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,15 +24,18 @@ dependencies = [ "certifi >= 14.5.14", "cffi", "click >=8.0, <=8.1.3", - "cryptography >=41.0.4,<44", + "crcmod", + "cryptography >=42.0.4,<44", + "deepmerge", "ecdsa", + "fastjsonschema", "fido2 >=1.1.2,<2", "intelhex", "nkdfu", "python-dateutil ~= 2.7.0", "pyusb", "requests", - "spsdk >=2.0,<2.2", + "ruamel.yaml", "tqdm", "tlv8", "typing_extensions ~= 4.3.0", @@ -40,7 +43,14 @@ dependencies = [ "protobuf >=3.17.3, < 4.0.0", "click-aliases", "semver", - "nethsm >= 1.1.0,<2", + "sly", + "importlib_metadata", + "hidapi", + "hid", + "nethsm >= 1.0.0,<2", + "asn1tools >= 0.166.0", + "pyyaml >= 6.0.1", + "types-PyYAML>= 6.0.1", ] dynamic = ["version", "description"] @@ -51,7 +61,7 @@ dev = [ "flit >=3.2,<4", "ipython", "isort", - "mypy >=1.4,<1.5", + "mypy >=1.9,<1.10", "pyinstaller ~=6.5.0", "pyinstaller-versionfile ==2.1.1; sys_platform=='win32'", "types-requests", @@ -96,6 +106,7 @@ module = [ "pynitrokey.libnk", "pynitrokey.start.*", "pynitrokey.test_secrets_app", + "pynitrokey.trussed.bootloader.lpc55_upload.*", ] check_untyped_defs = false disallow_any_generics = false @@ -138,6 +149,13 @@ module = [ "tlv8.*", "pytest.*", "click_aliases.*", + "sly.*", + "libusbsio.*", + "fastjsonschema", + "deepmerge.*", + "crcmod.*", + "hid.*", + "importlib_metadata.*", ] ignore_missing_imports = true
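
As a rough sketch of how the vendored SpsdkSoftEnum behaves (assuming the lpc55_upload tree is importable under the package layout introduced above; DemoStatus is a hypothetical enum used only for illustration, not part of this patch):

```python
from pynitrokey.trussed.bootloader.lpc55_upload.utils.spsdk_enum import SpsdkSoftEnum


class DemoStatus(SpsdkSoftEnum):
    """Hypothetical status enum: members are (tag, label, description) tuples."""

    SUCCESS = (0, "Success", "Operation succeeded")
    FAIL = (1, "Fail", "Operation failed")


print(DemoStatus.get_label(0))            # "Success"
print(DemoStatus.get_label(999))          # "Unknown (999)" -- soft fallback instead of SPSDKKeyError
print(DemoStatus.from_label("fail").tag)  # 1 -- label lookup is case-insensitive
print(DemoStatus.SUCCESS == 0, DemoStatus.SUCCESS == "Success")  # True True -- __eq__ matches tag or label
```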
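
Similarly, a minimal sketch of the vendored USB filtering logic under the same import-path assumption; the device dictionary below is fabricated example data, not real HID enumeration output:

```python
from pynitrokey.trussed.bootloader.lpc55_upload.utils.usbfilter import (
    NXPUSBDeviceFilter,
    USBDeviceFilter,
)

# Fabricated device record with the keys that compare() looks up via .get().
device = {
    "vendor_id": 0x1FC9,
    "product_id": 0x0021,
    "serial_number": "0123456789",
    "device_name": "demo-device",
}

print(USBDeviceFilter("0x1fc9").compare(device))                    # True  -- bare number matched as VID
print(USBDeviceFilter("0x1fc9:0x0021").compare(device))             # True  -- vid:pid pair
print(USBDeviceFilter("0x21", search_by_pid=True).compare(device))  # True  -- bare number matched as PID
print(USBDeviceFilter("0xffff").compare(device))                    # False
print(NXPUSBDeviceFilter().compare(device))                         # True  -- no usb_id given, VID 0x1FC9 is an NXP VID
```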