Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feat hpke dhkem #322

Merged
merged 2 commits into from
Oct 11, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions cmd/harp/internal/cmd/transform_decompress.go
Original file line number Diff line number Diff line change
Expand Up @@ -80,10 +80,10 @@ var transformDecompressCmd = func() *cobra.Command {
}

// Compute max decompression size
maxDecompressionSize := int64(params.maxDecompressionGuard) * 1024 * 1024
maxDecompressionSize := uint64(params.maxDecompressionGuard) * 1024 * 1024

// Process input as a stream.
if err := ioutil.Copy(maxDecompressionSize, writer, compressedReader); err != nil {
if _, err := ioutil.LimitCopy(writer, compressedReader, maxDecompressionSize); err != nil {
log.SafeClose(compressedReader, "unable to close the compression writer")
log.For(ctx).Fatal("unable to process input", zap.Error(err))
}
Expand Down
2 changes: 1 addition & 1 deletion pkg/bundle/codec.go
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ func Load(r io.Reader) (*bundlev1.Bundle, error) {

// Use buffered copy
decoded := &bytes.Buffer{}
if err = ioutil.Copy(maxBundleSize, decoded, r); err != nil {
if _, err = ioutil.LimitCopy(decoded, r, maxBundleSize); err != nil {
return nil, fmt.Errorf("unable to load bundle content")
}

Expand Down
4 changes: 2 additions & 2 deletions pkg/sdk/fsutil/targzfs/builders.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ func FromReader(r io.Reader) (fs.FS, error) {

// Chunked read with hard limit to prevent/reduce zipbomb vulnerability
// exploitation.
if err := ioutil.Copy(maxDecompressedSize, &tarContents, gz); err != nil {
if _, err := ioutil.LimitCopy(&tarContents, gz, maxDecompressedSize); err != nil {
return nil, fmt.Errorf("unable to decompress the archive: %w", err)
}

Expand Down Expand Up @@ -91,7 +91,7 @@ func FromReader(r io.Reader) (fs.FS, error) {

// Chunked read with hard limit to prevent/reduce post decompression
// explosion
if err := ioutil.Copy(maxFileSize, &fileContents, tarReader); err != nil {
if _, err := ioutil.LimitCopy(&fileContents, tarReader, maxFileSize); err != nil {
return nil, fmt.Errorf("unable to copy file content to memory: %w", err)
}

Expand Down
4 changes: 2 additions & 2 deletions pkg/sdk/fsutil/targzfs/fs.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,9 @@ import (

var (
// Block decompression if the TAR archive is larger than 25MB.
maxDecompressedSize = int64(25 * 1024 * 1024)
maxDecompressedSize = uint64(25 * 1024 * 1024)
// Maximum file size to load in memory (2MB).
maxFileSize = int64(2 * 1024 * 1024)
maxFileSize = uint64(2 * 1024 * 1024)
// Block decompression if the archive has more than 1k files.
maxFileCount = 1000
)
Expand Down
42 changes: 28 additions & 14 deletions pkg/sdk/ioutil/copy.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,37 +7,51 @@ package ioutil

import (
"errors"
"fmt"
"io"
"os"
)

// ErrTruncatedCopy is raised when the copy is larger than expected.
var ErrTruncatedCopy = errors.New("truncated copy due to too large input")

// Copy uses a buffered CopyN and a hardlimit to stop read from the reader when
// the maxSize amount of data has been written to the given writer.
func Copy(maxSize int64, w io.Writer, r io.Reader) error {
contentLength := int64(0)
// LimitCopy uses a buffered CopyN and a hardlimit to stop read from the reader when
// the maxSize amount of data has been written to the given writer and raise an
// error.
func LimitCopy(dst io.Writer, src io.Reader, maxSize uint64) (uint64, error) {
writtenLength := uint64(0)

// Chunked read with hard limit to prevent/reduce zipbomb vulnerability
// exploitation.
// Check arguments
if dst == nil {
return 0, errors.New("writer must not be nil")
}
if src == nil {
return 0, errors.New("reader must not be nil")
}

// Retrieve system pagesize for optimized buffer length
pageSize := os.Getpagesize()

// Chunked read with hard limit to reduce/prevent memory bomb.
for {
written, err := io.CopyN(w, r, 1024)
written, err := io.CopyN(dst, src, int64(pageSize))
if err != nil {
if errors.Is(err, io.EOF) {
writtenLength += uint64(written)
break
}
return err
return writtenLength, fmt.Errorf("unable to stream source data to destination: %w", err)
}

// Add to length
contentLength += written
writtenLength += uint64(written)
}

// Check max size
if contentLength > maxSize {
return ErrTruncatedCopy
}
// Check max size
if writtenLength > maxSize {
return writtenLength, ErrTruncatedCopy
}

// No error
return nil
return writtenLength, nil
}
217 changes: 217 additions & 0 deletions pkg/sdk/security/crypto/hpke/api.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,217 @@
// SPDX-FileCopyrightText: 2019-2023 Thibault NORMAND <[email protected]>
//
// SPDX-License-Identifier: Apache-2.0 AND MIT

// Package hpke provides RFC9180 hybrid public key encryption features.
package hpke

import (
"crypto"
"crypto/aes"
"crypto/cipher"
"crypto/sha256"
"crypto/sha512"
"errors"
"fmt"
"hash"
"io"

"golang.org/x/crypto/chacha20poly1305"
"golang.org/x/crypto/hkdf"
"zntr.io/harp/v2/pkg/sdk/security/crypto/kem"
)

// mode identifies the HPKE mode of operation. The values match Table 1 of
// RFC 9180, Section 5.
type mode uint8

const (
	// modeBase is the base mode (no sender authentication, no PSK).
	modeBase mode = 0x00
	// modePsk authenticates the sender via a pre-shared key.
	modePsk mode = 0x01
	// modeAuth authenticates the sender via its asymmetric key pair.
	modeAuth mode = 0x02
	// modeAuthPsk combines asymmetric sender authentication with a PSK.
	modeAuthPsk mode = 0x03
)

// -----------------------------------------------------------------------------

// KEM identifies a key encapsulation mechanism suite. The values match the
// HPKE KEM identifier registry from RFC 9180, Section 7.1.
type KEM uint16

//nolint:stylecheck
const (
	// KEM_P256_HKDF_SHA256 is a KEM using P-256 curve and HKDF with SHA-256.
	KEM_P256_HKDF_SHA256 KEM = 0x10
	// KEM_P384_HKDF_SHA384 is a KEM using P-384 curve and HKDF with SHA-384.
	KEM_P384_HKDF_SHA384 KEM = 0x11
	// KEM_P521_HKDF_SHA512 is a KEM using P-521 curve and HKDF with SHA-512.
	KEM_P521_HKDF_SHA512 KEM = 0x12
	// KEM_X25519_HKDF_SHA256 is a KEM using X25519 Diffie-Hellman function
	// and HKDF with SHA-256.
	KEM_X25519_HKDF_SHA256 KEM = 0x20
)

// Scheme returns the concrete KEM scheme implementation backing this suite
// identifier. It panics on an unknown identifier, so callers should gate on
// IsValid() first.
func (k KEM) Scheme() kem.Scheme {
	if k == KEM_P256_HKDF_SHA256 {
		return kem.DHP256HKDFSHA256()
	}
	if k == KEM_P384_HKDF_SHA384 {
		return kem.DHP384HKDFSHA384()
	}
	if k == KEM_P521_HKDF_SHA512 {
		return kem.DHP521HKDFSHA512()
	}
	if k == KEM_X25519_HKDF_SHA256 {
		return kem.DHX25519HKDFSHA256()
	}
	panic("invalid kem suite")
}

// IsValid reports whether k is one of the supported KEM suite identifiers.
func (k KEM) IsValid() bool {
	return k == KEM_P256_HKDF_SHA256 ||
		k == KEM_P384_HKDF_SHA384 ||
		k == KEM_P521_HKDF_SHA512 ||
		k == KEM_X25519_HKDF_SHA256
}

// -----------------------------------------------------------------------------

// KDF identifies a key derivation function suite. The values match the
// HPKE KDF identifier registry from RFC 9180, Section 7.2.
type KDF uint16

//nolint:stylecheck
const (
	// KDF_HKDF_SHA256 is a KDF using HKDF with SHA-256.
	KDF_HKDF_SHA256 KDF = 0x01
	// KDF_HKDF_SHA384 is a KDF using HKDF with SHA-384.
	KDF_HKDF_SHA384 KDF = 0x02
	// KDF_HKDF_SHA512 is a KDF using HKDF with SHA-512.
	KDF_HKDF_SHA512 KDF = 0x03
)

// IsValid reports whether k is one of the supported KDF suite identifiers.
func (k KDF) IsValid() bool {
	return k == KDF_HKDF_SHA256 ||
		k == KDF_HKDF_SHA384 ||
		k == KDF_HKDF_SHA512
}

// ExtractSize returns the output length (Nh) in bytes of the hash function
// underlying this KDF. It panics on an unknown identifier.
func (k KDF) ExtractSize() uint16 {
	var h crypto.Hash
	switch k {
	case KDF_HKDF_SHA256:
		h = crypto.SHA256
	case KDF_HKDF_SHA384:
		h = crypto.SHA384
	case KDF_HKDF_SHA512:
		h = crypto.SHA512
	default:
		panic("invalid hash")
	}
	return uint16(h.Size())
}

// Extract performs the HKDF-Extract step with the suite hash, deriving a
// pseudorandom key from secret and salt.
func (k KDF) Extract(secret, salt []byte) []byte {
	return hkdf.Extract(k.hash(), secret, salt)
}

// Expand performs the HKDF-Expand step with the suite hash, deriving
// outputLen bytes from the pseudorandom key prk and labeledInfo.
//
// It returns an error when prk is shorter than the hash output size (Nh) or
// when outputLen exceeds the HKDF expansion limit of 255*Nh bytes, per the
// RFC 9180 references below.
func (k KDF) Expand(prk, labeledInfo []byte, outputLen uint16) ([]byte, error) {
	extractSize := k.ExtractSize()
	// https://www.rfc-editor.org/rfc/rfc9180.html#kdf-input-length
	if len(prk) < int(extractSize) {
		return nil, fmt.Errorf("pseudorandom key must be at least %d bytes", extractSize)
	}
	// https://www.rfc-editor.org/rfc/rfc9180.html#name-secret-export
	if maxLength := 255 * extractSize; outputLen > maxLength {
		return nil, fmt.Errorf("expansion length is limited to %d", maxLength)
	}

	r := hkdf.Expand(k.hash(), prk, labeledInfo)
	out := make([]byte, outputLen)
	if _, err := io.ReadFull(r, out); err != nil {
		return nil, fmt.Errorf("unable to generate value from kdf: %w", err)
	}

	return out, nil
}

// hash returns the hash constructor backing this KDF identifier. It panics
// on an unknown identifier.
func (k KDF) hash() func() hash.Hash {
	if k == KDF_HKDF_SHA256 {
		return sha256.New
	}
	if k == KDF_HKDF_SHA384 {
		return sha512.New384
	}
	if k == KDF_HKDF_SHA512 {
		return sha512.New
	}
	panic("invalid hash")
}

// -----------------------------------------------------------------------------

// AEAD identifies an authenticated encryption with associated data suite.
// The values match the HPKE AEAD identifier registry from RFC 9180,
// Section 7.3.
type AEAD uint16

//nolint:stylecheck
const (
	// AEAD_AES128GCM is AES-128 block cipher in Galois Counter Mode (GCM).
	AEAD_AES128GCM AEAD = 0x01
	// AEAD_AES256GCM is AES-256 block cipher in Galois Counter Mode (GCM).
	AEAD_AES256GCM AEAD = 0x02
	// AEAD_ChaCha20Poly1305 is ChaCha20 stream cipher and Poly1305 MAC.
	AEAD_ChaCha20Poly1305 AEAD = 0x03
	// AEAD_EXPORT_ONLY is reserved for applications that only use the Exporter
	// interface.
	AEAD_EXPORT_ONLY AEAD = 0xFFFF
)

// IsValid reports whether a is one of the supported AEAD suite identifiers,
// including the export-only pseudo-suite.
func (a AEAD) IsValid() bool {
	return a == AEAD_AES128GCM ||
		a == AEAD_AES256GCM ||
		a == AEAD_ChaCha20Poly1305 ||
		a == AEAD_EXPORT_ONLY
}

// New instantiates the AEAD cipher identified by a with the given key.
//
// The key length must match KeySize() for the selected suite. This guards
// against silently instantiating the wrong AES variant: aes.NewCipher accepts
// 16, 24 or 32-byte keys and selects AES-128/192/256 from the key length, so
// without the check a 32-byte key under AEAD_AES128GCM would yield
// AES-256-GCM. AEAD_EXPORT_ONLY has no cipher and always returns an error.
// It panics on an unknown identifier.
func (a AEAD) New(key []byte) (cipher.AEAD, error) {
	switch a {
	case AEAD_AES128GCM, AEAD_AES256GCM:
		// Enforce the declared suite key size (see function comment).
		if len(key) != int(a.KeySize()) {
			return nil, fmt.Errorf("invalid key size, expected %d bytes, got %d", a.KeySize(), len(key))
		}
		block, err := aes.NewCipher(key)
		if err != nil {
			return nil, err
		}
		return cipher.NewGCM(block)
	case AEAD_ChaCha20Poly1305:
		// chacha20poly1305.New already rejects keys that are not 32 bytes.
		return chacha20poly1305.New(key)
	case AEAD_EXPORT_ONLY:
		return nil, errors.New("AEAD cipher can't be initialized in export-only mode")
	default:
		panic("invalid aead")
	}
}

// KeySize returns the secret key length (Nk) in bytes for the AEAD suite.
// Export-only mode has no key material and reports zero. It panics on an
// unknown identifier.
func (a AEAD) KeySize() uint16 {
	if a == AEAD_AES128GCM {
		return 16
	}
	if a == AEAD_AES256GCM {
		return 32
	}
	if a == AEAD_ChaCha20Poly1305 {
		return chacha20poly1305.KeySize
	}
	if a == AEAD_EXPORT_ONLY {
		return 0
	}
	panic("invalid aead")
}

// NonceSize returns the nonce length (Nn) in bytes for the AEAD suite.
// Export-only mode has no cipher and reports zero. It panics on an unknown
// identifier.
func (a AEAD) NonceSize() uint16 {
	if a == AEAD_EXPORT_ONLY {
		return 0
	}
	if a == AEAD_AES128GCM || a == AEAD_AES256GCM || a == AEAD_ChaCha20Poly1305 {
		return 12
	}
	panic("invalid aead")
}
Loading
Loading