diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 000000000..e2a66021d --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,248 @@ +# This code is licensed under the terms of the MIT license. + +run: + # Timeout for analysis, e.g. 30s, 5m. + # Default: 1m + timeout: 3m + +# This file contains only configs which differ from defaults. +# All possible options can be found here https://github.com/golangci/golangci-lint/blob/master/.golangci.reference.yml +linters-settings: + cyclop: + # The maximal code complexity to report. + # Default: 10 + max-complexity: 30 + # The maximal average package complexity. + # If it's higher than 0.0 (float) the check is enabled + # Default: 0.0 + package-average: 10.0 + + errcheck: + # Report about not checking of errors in type assertions: `a := b.(MyStruct)`. + # Such cases aren't reported by default. + # Default: false + check-type-assertions: true + + funlen: + # Checks the number of lines in a function. + # If lower than 0, disable the check. + # Default: 60 + lines: 100 + # Checks the number of statements in a function. + # If lower than 0, disable the check. + # Default: 40 + statements: 50 + + gocognit: + # Minimal code complexity to report + # Default: 30 (but we recommend 10-20) + min-complexity: 20 + + gomnd: + # List of function patterns to exclude from analysis. + # Values always ignored: `time.Date` + # Default: [] + ignored-functions: + - os.Chmod + - os.Mkdir + - os.MkdirAll + - os.OpenFile + - os.WriteFile + - strconv.FormatFloat + - strconv.FormatInt + - strconv.FormatUint + - strconv.ParseFloat + - strconv.ParseInt + - strconv.ParseUint + + govet: + # Enable all analyzers. + # Default: false + enable-all: true + # Disable analyzers by name. + # Run `go tool vet help` to see all analyzers. + # Default: [] + disable: + - fieldalignment # too strict + - shadow # too many false positives + + nakedret: + # Make an issue if func has more lines of code than this setting, and it has naked returns. + # Default: 30 + max-func-lines: 10 + + nolintlint: + # Exclude following linters from requiring an explanation. + # Default: [] + allow-no-explanation: [ funlen, gocognit, lll ] + # Enable to require an explanation of nonzero length after each nolint directive. + # Default: false + require-explanation: true + # Enable to require nolint directives to mention the specific linter being suppressed. + # Default: false + require-specific: true + + tenv: + # The option `all` will run against whole test files (`_test.go`) regardless of method/function signatures. + # Otherwise, only methods that take `*testing.T`, `*testing.B`, and `testing.TB` as arguments are checked. 
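+    # Purely illustrative (this comment is not read by golangci-lint): with `all: true`,
+    # a helper in a _test.go file such as
+    #     func setup() { os.Setenv("MODE", "test") } // reported by tenv
+    # is flagged even though it takes no *testing.T; the suggested replacement is t.Setenv.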
+ # Default: false + all: true + + gosec: + excludes: + - G107 # variables in URLs + - G404 # use of weak random generator + +linters: + disable-all: true + enable: + ## enabled by default + - errcheck # checking for unchecked errors, these unchecked errors can be critical bugs in some cases + - gosimple # specializes in simplifying a code + - govet # reports suspicious constructs, such as Printf calls whose arguments do not align with the format string + - ineffassign # detects when assignments to existing variables are not used + - staticcheck # is a go vet on steroids, applying a ton of static analysis checks + - typecheck # like the front-end of a Go compiler, parses and type-checks Go code + - unused # checks for unused constants, variables, functions and types + ## disabled by default + - asasalint # checks for pass []any as any in variadic func(...any) + - asciicheck # checks that your code does not contain non-ASCII identifiers + - bidichk # checks for dangerous unicode character sequences + - bodyclose # checks whether HTTP response body is closed successfully + #- contextcheck # checks the function whether use a non-inherited context # TODO: enable after golangci-lint uses https://github.com/sylvia7788/contextcheck/releases/tag/v1.0.7 + - cyclop # checks function and package cyclomatic complexity + - dupl # tool for code clone detection + - durationcheck # checks for two durations multiplied together + - errname # checks that sentinel errors are prefixed with the Err and error types are suffixed with the Error + - errorlint # finds code that will cause problems with the error wrapping scheme introduced in Go 1.13 + - execinquery # checks query string in Query function which reads your Go src files and warning it finds + - exhaustive # checks exhaustiveness of enum switch statements + - exportloopref # checks for pointers to enclosing loop variables + - forbidigo # forbids identifiers + - funlen # tool for detection of long functions + #- gochecknoglobals # checks that no global variables exist + #- gochecknoinits # checks that no init functions are present in Go code + - gocognit # computes and checks the cognitive complexity of functions + - goconst # finds repeated strings that could be replaced by a constant + - gocritic # provides diagnostics that check for bugs, performance and style issues + - gocyclo # computes and checks the cyclomatic complexity of functions + #- godot # checks if comments end in a period + #- goimports # in addition to fixing imports, goimports also formats your code in the same style as gofmt + #- gomnd # detects magic numbers - ENABLE LATER + - gomoddirectives # manages the use of 'replace', 'retract', and 'excludes' directives in go.mod + - gomodguard # allow and block lists linter for direct Go module dependencies. 
This is different from depguard where there are different block types for example version constraints and module recommendations + - goprintffuncname # checks that printf-like functions are named with f at the end + - gosec # inspects source code for security problems + - lll # reports long lines + #- makezero # finds slice declarations with non-zero initial length + - nakedret # finds naked returns in functions greater than a specified function length + - nestif # reports deeply nested if statements + - nilerr # finds the code that returns nil even if it checks that the error is not nil + - nilnil # checks that there is no simultaneous return of nil error and an invalid value + #- noctx # finds sending http request without context.Context + - nolintlint # reports ill-formed or insufficient nolint directives + #- nonamedreturns # reports all named returns + - nosprintfhostport # checks for misuse of Sprintf to construct a host with port in a URL + - predeclared # finds code that shadows one of Go's predeclared identifiers + #- promlinter # checks Prometheus metrics naming via promlint + - reassign # checks that package variables are not reassigned + - revive # fast, configurable, extensible, flexible, and beautiful linter for Go, drop-in replacement of golint + #- rowserrcheck # checks whether Err of rows is checked successfully + #- sqlclosecheck # checks that sql.Rows and sql.Stmt are closed + - stylecheck # is a replacement for golint + - tenv # detects using os.Setenv instead of t.Setenv since Go1.17 + #- testpackage # makes you use a separate _test package + - tparallel # detects inappropriate usage of t.Parallel() method in your Go test codes + #- unconvert # removes unnecessary type conversions + - unparam # reports unused function parameters + - usestdlibvars # detects the possibility to use variables/constants from the Go standard library + #- wastedassign # finds wasted assignment statements + #- whitespace # detects leading and trailing whitespace + + ## you may want to enable + #- decorder # checks declaration order and count of types, constants, variables and functions + #- exhaustruct # checks if all structure fields are initialized + #- gci # controls golang package import order and makes it always deterministic + - godox # detects FIXME, TODO and other comment keywords + #- goheader # checks is file header matches to pattern + - interfacebloat # checks the number of methods inside an interface + #- ireturn # accept interfaces, return concrete types + #- prealloc # [premature optimization, but can be used in some cases] finds slice declarations that could potentially be preallocated + #- varnamelen # [great idea, but too many false positives] checks that the length of a variable's name matches its scope + #- wrapcheck # checks that errors returned from external packages are wrapped + + ## disabled + #- containedctx # detects struct contained context.Context field + #- depguard # [replaced by gomodguard] checks if package imports are in a list of acceptable packages + #- dogsled # checks assignments with too many blank identifiers (e.g. x, _, _, _, := f()) + #- errchkjson # [don't see profit + I'm against of omitting errors like in the first example https://github.com/breml/errchkjson] checks types passed to the json encoding functions. 
Reports unsupported types and optionally reports occasions, where the check for the returned error can be omitted + #- forcetypeassert # [replaced by errcheck] finds forced type assertions + #- goerr113 # [too strict] checks the errors handling expressions + #- gofmt # [replaced by goimports] checks whether code was gofmt-ed + #- gofumpt # [replaced by goimports, gofumports is not available yet] checks whether code was gofumpt-ed + #- grouper # analyzes expression groups + #- importas # enforces consistent import aliases + #- logrlint # [owner archived repository] checks logr arguments + #- maintidx # measures the maintainability index of each function + #- misspell # [useless] finds commonly misspelled English words in comments + #- nlreturn # [too strict and mostly code is not more readable] checks for a new line before return and branch statements to increase code clarity + #- paralleltest # [too many false positives] detects missing usage of t.Parallel() method in your Go test + #- tagliatelle # checks the struct tags + #- thelper # detects golang test helpers without t.Helper() call and checks the consistency of test helpers + #- wsl # [too strict and mostly code is not more readable] whitespace linter forces you to use empty lines + + ## deprecated + #- deadcode # [deprecated, replaced by unused] finds unused code + #- exhaustivestruct # [deprecated, replaced by exhaustruct] checks if all struct's fields are initialized + #- golint # [deprecated, replaced by revive] golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes + #- ifshort # [deprecated] checks that your code uses short syntax for if-statements whenever possible + #- interfacer # [deprecated] suggests narrower interface types + #- maligned # [deprecated, replaced by govet fieldalignment] detects Go structs that would take less memory if their fields were sorted + #- nosnakecase # [deprecated, replaced by revive var-naming] detects snake case of variable naming and function name + #- scopelint # [deprecated, replaced by exportloopref] checks for unpinned variables in go programs + #- structcheck # [deprecated, replaced by unused] finds unused struct fields + #- varcheck # [deprecated, replaced by unused] finds unused global variables and constants + + +issues: + # Maximum count of issues with the same text. + # Set to 0 to disable. 
+ # Default: 3 + max-same-issues: 50 + + exclude-rules: + - source: "^//\\s*go:generate\\s" + linters: [ lll ] + - source: "(noinspection|TODO)" + linters: [ godot ] + - source: "//noinspection" + linters: [ gocritic ] + - source: "^\\s+if _, ok := err\\.\\([^.]+\\.InternalError\\); ok {" + linters: [ errorlint ] + - path: ".skeleton" + linters: + - unused + - unparam + - path: ".*_test.go" + linters: + - bodyclose + - dupl + - errcheck + - funlen + - gochecknoglobals + - gocognit + - goconst + - gosec + - lll + - noctx + - unparam + - wrapcheck + - linters: + - gocritic + text: "appendAssign:" + - linters: + - revive + text: "unexported-return:" + - linters: + - govet + text: "shadow: declaration of \"err\" shadows declaration" diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 7c7749d61..000000000 --- a/.travis.yml +++ /dev/null @@ -1,17 +0,0 @@ -language: go - -go: - - "1.15.x" - -go_import_path: go.dedis.ch/kyber/v3 - -script: GO111MODULE=on make test - -notifications: - email: false - -# https://restic.net/blog/2018-09-02/travis-build-cache -cache: - directories: - - $HOME/.cache/go-build - - $GOPATH/pkg/mod diff --git a/Makefile b/Makefile index ab8eb79cc..f5b2c46aa 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,27 @@ -.DEFAULT_GOAL := test +tidy: + #go install honnef.co/go/tools/cmd/staticcheck@latest + go mod tidy -Coding/bin/Makefile.base: - git clone https://github.com/dedis/Coding -include Coding/bin/Makefile.base +generate: tidy + go generate ./... + +# Coding style static check. +lint: tidy + @echo "Please setup a linter!" + #golangci-lint run + #staticcheck go list ./... + + +vet: tidy + go vet ./... + +test: tidy + go test ./... + +coverage: tidy + go test -json -covermode=count -coverprofile=profile.cov ./... > report.json + +# target to run all the possible checks; it's a good habit to run it before +# pushing code +check: lint vet test + echo "check done" diff --git a/README.md b/README.md index 2d302e4eb..23dd7549e 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ -[![Docs](https://img.shields.io/badge/docs-current-brightgreen.svg)](https://godoc.org/go.dedis.ch/kyber) -[![Build Status](https://travis-ci.org/dedis/kyber.svg?branch=master)](https://travis-ci.org/dedis/kyber) +[![Go test](https://github.com/dedis/kyber/actions/workflows/go_tests.yml/badge.svg)](https://github.com/dedis/kyber/actions/workflows/go_tests.yml) +[![Coverage Status](https://coveralls.io/repos/github/dedis/kyber/badge.svg?branch=master)](https://coveralls.io/github/dedis/kyber?branch=master) +[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=dedis_kyber&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=dedis_kyber) +[![Go Reference](https://pkg.go.dev/badge/github.com/dedis/kyber.svg)](https://pkg.go.dev/github.com/dedis/kyber) DEDIS Advanced Crypto Library for Go ==================================== @@ -17,6 +19,16 @@ and you need to constrain Kyber to offering only constant time implementations, you should use the [suites.RequireConstantTime()](https://godoc.org/go.dedis.ch/kyber/suites#RequireConstantTime) function in the `init()` function of your `main` package. +Target Audience +--------------- + +This library is intended to be used by developers who are at least moderately knowledgeable about cryptography. 
+If you want a crypto library that makes it easy to implement "basic crypto" functionality correctly - i.e., plain public-key encryption and signing - then [NaCl secretbox](https://godoc.org/golang.org/x/crypto/nacl/secretbox) may be a better choice. +Or use Google's [Tink](https://developers.google.com/tink) + +This toolkit's purpose is to make it possible - and preferably easy - to do slightly more interesting things that most current crypto libraries don't support effectively. +The one existing crypto library that this toolkit is probably most comparable to is the [Charm](https://charm-crypto.com/category/charm) rapid prototyping library for Python. + Versioning - Development ------------------------ diff --git a/examples/bn256_enc_test.go b/examples/bn256_enc_test.go new file mode 100644 index 000000000..4c8f27d21 --- /dev/null +++ b/examples/bn256_enc_test.go @@ -0,0 +1,87 @@ +package examples + +import ( + "fmt" + + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/pairing" + "go.dedis.ch/kyber/v3/pairing/bn256" + "go.dedis.ch/kyber/v3/util/random" +) + +func ElGamalEncryptBn256(suite pairing.Suite, pubkey kyber.Point, message []byte) ( + K, C kyber.Point, remainder []byte) { + + // Embed the message (or as much of it as will fit) into a curve point. + M := suite.G1().Point().Embed(message, random.New()) + max := suite.G1().Point().EmbedLen() + if max > len(message) { + max = len(message) + } + remainder = message[max:] + // ElGamal-encrypt the point to produce ciphertext (K,C). + k := suite.G1().Scalar().Pick(random.New()) // ephemeral private key + K = suite.G1().Point().Mul(k, nil) // ephemeral DH public key + S := suite.G1().Point().Mul(k, pubkey) // ephemeral DH shared secret + C = suite.G1().Point().Add(S, M) // message blinded with secret + return +} + +func ElGamalDecryptBn256(suite pairing.Suite, prikey kyber.Scalar, K, C kyber.Point) ( + message []byte, err error) { + + // ElGamal-decrypt the ciphertext (K,C) to reproduce the message. + S := suite.G1().Point().Mul(prikey, K) // regenerate shared secret + M := suite.G1().Point().Sub(C, S) // use to un-blind the message + message, err = M.Data() // extract the embedded data + return +} + +/* +This example illustrates how the crypto toolkit may be used +to perform "pure" ElGamal encryption, +in which the message to be encrypted is small enough to be embedded +directly within a group element (e.g., in an elliptic curve point). +For basic background on ElGamal encryption see for example +http://en.wikipedia.org/wiki/ElGamal_encryption. + +Most public-key crypto libraries tend not to support embedding data in points, +in part because for "vanilla" public-key encryption you don't need it: +one would normally just generate an ephemeral Diffie-Hellman secret +and use that to seed a symmetric-key crypto algorithm such as AES, +which is much more efficient per bit and works for arbitrary-length messages. +However, in many advanced public-key crypto algorithms it is often useful +to be able to embedded data directly into points and compute with them: +as just one of many examples, +the proactively verifiable anonymous messaging scheme prototyped in Verdict +(see http://dedis.cs.yale.edu/dissent/papers/verdict-abs). + +For fancier versions of ElGamal encryption implemented in this toolkit +see for example anon.Encrypt, which encrypts a message for +one of several possible receivers forming an explicit anonymity set. 
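+
+As a quick sketch of why the decryption above works: with private key a and public
+key A = aG, ElGamalEncryptBn256 produces K = kG and C = M + kA; ElGamalDecryptBn256
+computes S = aK = a(kG) = kA, so C - S = M, and M.Data() recovers the embedded bytes.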
+*/ +func Example_elGamalEncryption_bn256() { + suite := bn256.NewSuiteBn256() + + // Create a public/private keypair + a := suite.G1().Scalar().Pick(suite.RandomStream()) // Alice's private key + A := suite.G1().Point().Mul(a, nil) // Alice's public key + + // ElGamal-encrypt a message using the public key. + m := []byte("The quick brown fox") + K, C, _ := ElGamalEncryptBn256(suite, A, m) + + // Decrypt it using the corresponding private key. + mm, err := ElGamalDecryptBn256(suite, a, K, C) + + // Make sure it worked! + if err != nil { + fmt.Println("decryption failed: " + err.Error()) + } else if string(mm) != string(m) { + fmt.Println("decryption produced wrong output: " + string(mm)) + } else { + fmt.Println("Decryption succeeded: " + string(mm)) + } + // Output: + // Decryption succeeded: The quick brown fox +} diff --git a/examples/neff_shuffle_test.go b/examples/neff_shuffle_test.go new file mode 100644 index 000000000..aca1c69c7 --- /dev/null +++ b/examples/neff_shuffle_test.go @@ -0,0 +1,95 @@ +package examples + +import ( + "testing" + + "github.com/stretchr/testify/require" + "go.dedis.ch/kyber/v3" + kproof "go.dedis.ch/kyber/v3/proof" + "go.dedis.ch/kyber/v3/shuffle" +) + +// This example illustrates how to use the Neff shuffle protocol with simple, +// single pairs. +func Test_Example_Neff_Shuffle_Simple(t *testing.T) { + numPairs := 3 + + // generate random pairs + ks := make([]kyber.Point, numPairs) + cs := make([]kyber.Point, numPairs) + + for i := 0; i < numPairs; i++ { + c := suite.Point().Mul(suite.Scalar().Pick(suite.RandomStream()), nil) + k := suite.Point().Mul(suite.Scalar().Pick(suite.RandomStream()), nil) + + ks[i] = k + cs[i] = c + } + + // shuffle the pairs + xx, yy, prover := shuffle.Shuffle(suite, nil, nil, ks, cs, suite.RandomStream()) + + // compute the proof + proof, err := kproof.HashProve(suite, "PairShuffle", prover) + require.NoError(t, err) + + // check the proof + verifier := shuffle.Verifier(suite, nil, nil, ks, cs, xx, yy) + + err = kproof.HashVerify(suite, "PairShuffle", verifier, proof) + require.NoError(t, err) +} + +// This example illustrates how to use the Neff shuffle protocol on sequences of +// pairs. The single pair protocol (see above) uses as inputs one-dimensional +// slices. This variation uses 2-dimensional slices, where the number of columns +// defines the number of sequences, and the number of rows defines the length of +// sequences. There is also a difference when getting the prover. In this +// variation the Shuffle function doesn't directly return a prover, but a +// function to get it. This is because the verifier must provide a slice of +// random numbers to the prover. 
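+// In the test below, the challenge slice e contains one random scalar per sequence
+// (NQ = len(X) entries); the same slice is handed to getProver and later to
+// GetSequenceVerifiable, so the prover and the verifier use identical challenges.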
+func Test_Example_Neff_Shuffle_Sequence(t *testing.T) { + sequenceLen := 3 + numSequences := 3 + + X := make([][]kyber.Point, numSequences) + Y := make([][]kyber.Point, numSequences) + + // generate random sequences + for i := 0; i < numSequences; i++ { + xs := make([]kyber.Point, sequenceLen) + ys := make([]kyber.Point, sequenceLen) + + for j := 0; j < sequenceLen; j++ { + xs[j] = suite.Point().Mul(suite.Scalar().Pick(suite.RandomStream()), nil) + ys[j] = suite.Point().Mul(suite.Scalar().Pick(suite.RandomStream()), nil) + } + + X[i] = xs + Y[i] = ys + } + + // shuffle sequences + XX, YY, getProver := shuffle.SequencesShuffle(suite, nil, nil, X, Y, suite.RandomStream()) + + // compute the proof + NQ := len(X) + e := make([]kyber.Scalar, NQ) + for j := 0; j < NQ; j++ { + e[j] = suite.Scalar().Pick(suite.RandomStream()) + } + + prover, err := getProver(e) + require.NoError(t, err) + + proof, err := kproof.HashProve(suite, "SequencesShuffle", prover) + require.NoError(t, err) + + // check the proof + XXUp, YYUp, XXDown, YYDown := shuffle.GetSequenceVerifiable(suite, X, Y, XX, YY, e) + + verifier := shuffle.Verifier(suite, nil, nil, XXUp, YYUp, XXDown, YYDown) + + err = kproof.HashVerify(suite, "SequencesShuffle", verifier, proof) + require.NoError(t, err) +} diff --git a/go.mod b/go.mod index dfc3a3781..f88951501 100644 --- a/go.mod +++ b/go.mod @@ -1,12 +1,13 @@ module go.dedis.ch/kyber/v3 require ( + github.com/cloudflare/circl v1.3.7 github.com/kilic/bls12-381 v0.1.0 github.com/stretchr/testify v1.3.0 go.dedis.ch/fixbuf v1.0.3 go.dedis.ch/protobuf v1.0.11 - golang.org/x/crypto v0.0.0-20190123085648-057139ce5d2b - golang.org/x/sys v0.0.0-20201101102859-da207088b7d1 + golang.org/x/crypto v0.17.0 + golang.org/x/sys v0.15.0 ) go 1.13 diff --git a/go.sum b/go.sum index 85b33d63c..76bce07da 100644 --- a/go.sum +++ b/go.sum @@ -1,3 +1,6 @@ +github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= +github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= +github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/kilic/bls12-381 v0.1.0 h1:encrdjqKMEvabVQ7qYOKu1OvhqpK4s47wDYtNiPtlp4= @@ -7,20 +10,54 @@ github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZN github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= go.dedis.ch/fixbuf v1.0.3 h1:hGcV9Cd/znUxlusJ64eAlExS+5cJDIyTyEG+otu5wQs= go.dedis.ch/fixbuf v1.0.3/go.mod h1:yzJMt34Wa5xD37V5RTdmp38cz3QhMagdGoem9anUalw= -go.dedis.ch/kyber/v3 v3.0.4 h1:FDuC/S3STkvwxZ0ooo3gcp56QkUKsN7Jy7cpzBxL+vQ= go.dedis.ch/kyber/v3 v3.0.4/go.mod h1:OzvaEnPvKlyrWyp3kGXlFdp7ap1VC6RkZDTaPikqhsQ= go.dedis.ch/kyber/v3 v3.0.9/go.mod h1:rhNjUUg6ahf8HEg5HUvVBYoWY4boAafX8tYxX+PS+qg= -go.dedis.ch/protobuf v1.0.5 h1:EbF1czEKICxf5KY8Tm7wMF28hcOQbB6yk4IybIFWTYE= go.dedis.ch/protobuf v1.0.5/go.mod h1:eIV4wicvi6JK0q/QnfIEGeSFNG0ZeB24kzut5+HaRLo= -go.dedis.ch/protobuf v1.0.7 h1:wRUEiq3u0/vBhLjcw9CmAVrol+BnDyq2M0XLukdphyI= go.dedis.ch/protobuf v1.0.7/go.mod 
h1:pv5ysfkDX/EawiPqcW3ikOxsL5t+BqnV6xHSmE79KI4= go.dedis.ch/protobuf v1.0.11 h1:FTYVIEzY/bfl37lu3pR4lIj+F9Vp1jE8oh91VmxKgLo= go.dedis.ch/protobuf v1.0.11/go.mod h1:97QR256dnkimeNdfmURz0wAMNVbd1VmLXhG1CrTYrJ4= -golang.org/x/crypto v0.0.0-20190123085648-057139ce5d2b h1:Elez2XeF2p9uyVj0yEUDqQ56NFcDtcBNkYP7yv8YbUE= golang.org/x/crypto v0.0.0-20190123085648-057139ce5d2b/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/sys v0.0.0-20190124100055-b90733256f2e h1:3GIlrlVLfkoipSReOMNAgApI0ajnalyLa/EZHHca/XI= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k= +golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= +golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= +golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= +golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190124100055-b90733256f2e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20201101102859-da207088b7d1 h1:a/mKvvZr9Jcc8oKfcmgzyp7OwF73JPWsQLvH1z2Kxck= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20201101102859-da207088b7d1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= +golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= +golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= +golang.org/x/text v0.3.0/go.mod 
h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= +golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= +golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/group.go b/group.go index 09612c751..567ab6faa 100644 --- a/group.go +++ b/group.go @@ -112,6 +112,10 @@ type Point interface { Mul(s Scalar, p Point) Point } +type HashablePoint interface { + Hash([]byte) Point +} + // AllowsVarTime allows callers to determine if a given kyber.Scalar // or kyber.Point supports opting-in to variable time operations. If // an object implements AllowsVarTime, then the caller can use @@ -163,3 +167,14 @@ type Group interface { PointLen() int // Max length of point in bytes Point() Point // Create new point } + +// SubGroupElement allows to verify if a Point is in the correct group or not. +// For curves which don't have a prime order, we need to only consider the +// points lying in the subgroup of prime order. That check returns true if the +// point is correct or not. If the curve forms already a prime order// group, +// then this method should be implemented as a nop returning true, to be able to +// use the Schnorr signature scheme for example. +type SubGroupElement interface { + Point + IsInCorrectGroup() bool +} diff --git a/group/curve25519/basic.go b/group/curve25519/basic.go index 9eefb431e..7ef007d96 100644 --- a/group/curve25519/basic.go +++ b/group/curve25519/basic.go @@ -1,3 +1,4 @@ +//go:build experimental // +build experimental package curve25519 diff --git a/group/curve25519/basic_test.go b/group/curve25519/basic_test.go index 7667de15a..b5936ac6d 100644 --- a/group/curve25519/basic_test.go +++ b/group/curve25519/basic_test.go @@ -1,3 +1,4 @@ +//go:build experimental // +build experimental package curve25519 diff --git a/pairing/bls12381/LICENSE b/pairing/bls12381/LICENSE deleted file mode 100644 index 98b4197e0..000000000 --- a/pairing/bls12381/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2020 drand - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/pairing/bls12381/bls12381_test.go b/pairing/bls12381/bls12381_test.go new file mode 100644 index 000000000..249ea7179 --- /dev/null +++ b/pairing/bls12381/bls12381_test.go @@ -0,0 +1,170 @@ +package bls12381 + +import ( + "crypto/rand" + "fmt" + "testing" + + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/pairing" + circl "go.dedis.ch/kyber/v3/pairing/bls12381/circl" + kilic "go.dedis.ch/kyber/v3/pairing/bls12381/kilic" + "go.dedis.ch/kyber/v3/sign/bls" + "go.dedis.ch/kyber/v3/util/random" +) + +var ( + dataSize = 32 + numSigs = []int{1, 10, 100, 1000, 10000} + curveOptions = []string{"kilic", "circl"} +) + +// Used to avoid compiler optimizations +// https://www.practical-go-lessons.com/chap-34-benchmarks#:~:text=This%20variable%20is%20just%20here%20to%20avoid%20compiler%20optimization +var result interface{} + +func BenchmarkKilic(b *testing.B) { + BLSBenchmark(b, "kilic") +} + +func BenchmarkCircl(b *testing.B) { + BLSBenchmark(b, "circl") +} + +func BLSBenchmark(b *testing.B, curveOption string) { + b.Logf("----------------------") + b.Logf("Payload to sign: %d bytes\n", dataSize) + b.Logf("Numbers of signatures: %v\n", numSigs) + b.Logf("Curve options: %v\n", curveOptions) + b.Logf("----------------------") + + // Initialize all variables. + msgData := make([]byte, dataSize) + nBytes, err := rand.Read(msgData) + if err != nil { + panic(err) + } + if nBytes != dataSize { + panic(fmt.Errorf("only read %d random bytes, but data size is %d", nBytes, dataSize)) + } + + randSource := random.New(rand.Reader) + var suite pairing.Suite + if curveOption == "kilic" { + suite = kilic.NewBLS12381Suite() + } else if curveOption == "circl" { + suite = circl.NewSuiteBLS12381() + } else { + panic(fmt.Errorf("invalid curve option: %s", curveOption)) + } + + schemeOnG1 := bls.NewSchemeOnG1(suite) + schemeOnG2 := bls.NewSchemeOnG2(suite) + + maxN := 1 + for _, s := range numSigs { + if maxN < s { + maxN = s + } + } + + privKeysOnG1 := make([]kyber.Scalar, maxN) + privKeysOnG2 := make([]kyber.Scalar, maxN) + pubKeysOnG1 := make([]kyber.Point, maxN) + pubKeysOnG2 := make([]kyber.Point, maxN) + sigsOnG1 := make([][]byte, maxN) + sigsOnG2 := make([][]byte, maxN) + + for i := 0; i < maxN; i++ { + privKeysOnG1[i], pubKeysOnG1[i] = schemeOnG1.NewKeyPair(randSource) + sigsOnG1[i], err = schemeOnG1.Sign(privKeysOnG1[i], msgData) + if err != nil { + panic(err) + } + privKeysOnG2[i], pubKeysOnG2[i] = schemeOnG2.NewKeyPair(randSource) + sigsOnG2[i], err = schemeOnG2.Sign(privKeysOnG2[i], msgData) + if err != nil { + panic(err) + } + } + + for _, n := range numSigs { + // Benchmark aggregation of public keys + b.Run(fmt.Sprintf("AggregatePublicKeys-G1 on %d signs", n), func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + result = schemeOnG1.AggregatePublicKeys(pubKeysOnG1[:n]...) + } + }) + b.Run(fmt.Sprintf("AggregatePublicKeys-G2 on %d signs", n), func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + result = schemeOnG2.AggregatePublicKeys(pubKeysOnG2[:n]...) + } + }) + + // Benchmark aggregation of signatures + b.Run(fmt.Sprintf("AggregateSign-G1 on %d signs", n), func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + result, err = schemeOnG1.AggregateSignatures(sigsOnG1[:n]...) 
+ if err != nil { + panic(err) + } + } + }) + b.Run(fmt.Sprintf("AggregateSign-G1 on %d signs", n), func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + result, err = schemeOnG2.AggregateSignatures(sigsOnG2[:n]...) + if err != nil { + panic(err) + } + } + }) + } + + // Benchmark keygen + b.Run("KeyGen-G1", func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + result, _ = schemeOnG1.NewKeyPair(randSource) + } + }) + b.Run("KeyGen-G2", func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + result, _ = schemeOnG2.NewKeyPair(randSource) + } + }) + + // Benchmark sign + b.Run("Sign-G1", func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + result, err = schemeOnG1.Sign(privKeysOnG1[0], msgData) + if err != nil { + panic(err) + } + } + }) + b.Run("Sign-G2", func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + result, err = schemeOnG2.Sign(privKeysOnG2[0], msgData) + if err != nil { + panic(err) + } + } + }) + + // Benchmark verify + b.Run("Verify-G1", func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + err = schemeOnG1.Verify(pubKeysOnG1[0], msgData, sigsOnG1[0]) + if err != nil { + panic(err) + } + } + }) + b.Run("Verify-G2", func(bb *testing.B) { + for j := 0; j < bb.N; j++ { + err = schemeOnG2.Verify(pubKeysOnG2[0], msgData, sigsOnG2[0]) + if err != nil { + panic(err) + } + } + }) +} diff --git a/pairing/bls12381/circl/adapter.go b/pairing/bls12381/circl/adapter.go new file mode 100644 index 000000000..2c1af4b37 --- /dev/null +++ b/pairing/bls12381/circl/adapter.go @@ -0,0 +1,48 @@ +package circl + +import ( + "go.dedis.ch/kyber/v3" +) + +// SuiteBLS12381 is an adapter that implements the suites.Suite interface so that +// bls12381 can be used as a common suite to generate key pairs for instance but +// still preserves the properties of the pairing (e.g. the Pair function). +// +// It's important to note that the Point function will generate a point +// compatible with public keys only (group G2) where the signature must be +// used as a point from the group G1. 
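+// As an example of typical use, key generation through util/key relies on these
+// methods: key.NewKeyPair(NewSuiteBLS12381()) yields a scalar private key and a
+// public key on G2, as exercised in adapter_test.go below.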
+type SuiteBLS12381 struct { + Suite + kyber.Group +} + +// NewSuiteBLS12381 makes a new BN256 suite +func NewSuiteBLS12381() *SuiteBLS12381 { + return &SuiteBLS12381{} +} + +// Point generates a point from the G2 group that can only be used +// for public keys +func (s *SuiteBLS12381) Point() kyber.Point { + return s.G2().Point() +} + +// PointLen returns the length of a G2 point +func (s *SuiteBLS12381) PointLen() int { + return s.G2().PointLen() +} + +// Scalar generates a scalar +func (s *SuiteBLS12381) Scalar() kyber.Scalar { + return s.G1().Scalar() +} + +// ScalarLen returns the lenght of a scalar +func (s *SuiteBLS12381) ScalarLen() int { + return s.G1().ScalarLen() +} + +// String returns the name of the suite +func (s *SuiteBLS12381) String() string { + return "circl.adapter" +} diff --git a/pairing/bls12381/circl/adapter_test.go b/pairing/bls12381/circl/adapter_test.go new file mode 100644 index 000000000..f9af07f7f --- /dev/null +++ b/pairing/bls12381/circl/adapter_test.go @@ -0,0 +1,28 @@ +package circl + +import ( + "testing" + + "github.com/stretchr/testify/require" + "go.dedis.ch/kyber/v3/util/key" +) + +func TestAdapter_SuiteBLS12381(t *testing.T) { + suite := NewSuiteBLS12381() + + pair := key.NewKeyPair(suite) + pubkey, err := pair.Public.MarshalBinary() + require.Nil(t, err) + privkey, err := pair.Private.MarshalBinary() + require.Nil(t, err) + + pubhex := suite.Point() + err = pubhex.UnmarshalBinary(pubkey) + require.Nil(t, err) + + privhex := suite.Scalar() + err = privhex.UnmarshalBinary(privkey) + require.Nil(t, err) + + require.Equal(t, "circl.adapter", suite.String()) +} diff --git a/pairing/bls12381/circl/g1.go b/pairing/bls12381/circl/g1.go new file mode 100644 index 000000000..a47d89b48 --- /dev/null +++ b/pairing/bls12381/circl/g1.go @@ -0,0 +1,95 @@ +package circl + +import ( + "crypto/cipher" + "io" + + bls12381 "github.com/cloudflare/circl/ecc/bls12381" + "go.dedis.ch/kyber/v3" +) + +var _ kyber.SubGroupElement = &G1Elt{} + +type G1Elt struct{ inner bls12381.G1 } + +func (p *G1Elt) MarshalBinary() (data []byte, err error) { return p.inner.BytesCompressed(), nil } + +func (p *G1Elt) UnmarshalBinary(data []byte) error { return p.inner.SetBytes(data) } + +func (p *G1Elt) String() string { return p.inner.String() } + +func (p *G1Elt) MarshalSize() int { return bls12381.G1SizeCompressed } + +func (p *G1Elt) MarshalTo(w io.Writer) (int, error) { + buf, err := p.MarshalBinary() + if err != nil { + return 0, err + } + return w.Write(buf) +} + +func (p *G1Elt) UnmarshalFrom(r io.Reader) (int, error) { + buf := make([]byte, p.MarshalSize()) + n, err := io.ReadFull(r, buf) + if err != nil { + return n, err + } + return n, p.UnmarshalBinary(buf) +} + +func (p *G1Elt) Equal(p2 kyber.Point) bool { x := p2.(*G1Elt); return p.inner.IsEqual(&x.inner) } + +func (p *G1Elt) Null() kyber.Point { p.inner.SetIdentity(); return p } + +func (p *G1Elt) Base() kyber.Point { p.inner = *bls12381.G1Generator(); return p } + +func (p *G1Elt) Pick(rand cipher.Stream) kyber.Point { + var buf [32]byte + rand.XORKeyStream(buf[:], buf[:]) + p.inner.Hash(buf[:], nil) + return p +} + +func (p *G1Elt) Set(p2 kyber.Point) kyber.Point { p.inner = p2.(*G1Elt).inner; return p } + +func (p *G1Elt) Clone() kyber.Point { return new(G1Elt).Set(p) } + +func (p *G1Elt) EmbedLen() int { + panic("bls12-381: unsupported operation") +} + +func (p *G1Elt) Embed(data []byte, r cipher.Stream) kyber.Point { + panic("bls12-381: unsupported operation") +} + +func (p *G1Elt) Data() ([]byte, error) { + 
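+	// As with Embed and EmbedLen above, data embedding is not supported by this
+	// adapter; callers that need to embed bytes into a point can use a group that
+	// implements it (see the bn256 ElGamal example added elsewhere in this change).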
panic("bls12-381: unsupported operation") +} + +func (p *G1Elt) Add(a, b kyber.Point) kyber.Point { + aa, bb := a.(*G1Elt), b.(*G1Elt) + p.inner.Add(&aa.inner, &bb.inner) + return p +} + +func (p *G1Elt) Sub(a, b kyber.Point) kyber.Point { return p.Add(a, new(G1Elt).Neg(b)) } + +func (p *G1Elt) Neg(a kyber.Point) kyber.Point { + p.Set(a) + p.inner.Neg() + return p +} + +func (p *G1Elt) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { + if q == nil { + q = new(G1Elt).Base() + } + ss, qq := s.(*Scalar), q.(*G1Elt) + p.inner.ScalarMult(&ss.inner, &qq.inner) + return p +} + +func (p *G1Elt) IsInCorrectGroup() bool { return p.inner.IsOnG1() } + +func (p *G1Elt) Hash(msg []byte) kyber.Point { p.inner.Hash(msg, nil); return p } +func (p *G1Elt) Hash2(msg, dst []byte) kyber.Point { p.inner.Hash(msg, dst); return p } diff --git a/pairing/bls12381/circl/g2.go b/pairing/bls12381/circl/g2.go new file mode 100644 index 000000000..f727254be --- /dev/null +++ b/pairing/bls12381/circl/g2.go @@ -0,0 +1,95 @@ +package circl + +import ( + "crypto/cipher" + "io" + + bls12381 "github.com/cloudflare/circl/ecc/bls12381" + "go.dedis.ch/kyber/v3" +) + +var _ kyber.SubGroupElement = &G2Elt{} + +type G2Elt struct{ inner bls12381.G2 } + +func (p *G2Elt) MarshalBinary() (data []byte, err error) { return p.inner.BytesCompressed(), nil } + +func (p *G2Elt) UnmarshalBinary(data []byte) error { return p.inner.SetBytes(data) } + +func (p *G2Elt) String() string { return p.inner.String() } + +func (p *G2Elt) MarshalSize() int { return bls12381.G2SizeCompressed } + +func (p *G2Elt) MarshalTo(w io.Writer) (int, error) { + buf, err := p.MarshalBinary() + if err != nil { + return 0, err + } + return w.Write(buf) +} + +func (p *G2Elt) UnmarshalFrom(r io.Reader) (int, error) { + buf := make([]byte, p.MarshalSize()) + n, err := io.ReadFull(r, buf) + if err != nil { + return n, err + } + return n, p.UnmarshalBinary(buf) +} + +func (p *G2Elt) Equal(p2 kyber.Point) bool { x := p2.(*G2Elt); return p.inner.IsEqual(&x.inner) } + +func (p *G2Elt) Null() kyber.Point { p.inner.SetIdentity(); return p } + +func (p *G2Elt) Base() kyber.Point { p.inner = *bls12381.G2Generator(); return p } + +func (p *G2Elt) Pick(rand cipher.Stream) kyber.Point { + var buf [32]byte + rand.XORKeyStream(buf[:], buf[:]) + p.inner.Hash(buf[:], nil) + return p +} + +func (p *G2Elt) Set(p2 kyber.Point) kyber.Point { p.inner = p2.(*G2Elt).inner; return p } + +func (p *G2Elt) Clone() kyber.Point { return new(G2Elt).Set(p) } + +func (p *G2Elt) EmbedLen() int { + panic("bls12-381: unsupported operation") +} + +func (p *G2Elt) Embed(data []byte, r cipher.Stream) kyber.Point { + panic("bls12-381: unsupported operation") +} + +func (p *G2Elt) Data() ([]byte, error) { + panic("bls12-381: unsupported operation") +} + +func (p *G2Elt) Add(a, b kyber.Point) kyber.Point { + aa, bb := a.(*G2Elt), b.(*G2Elt) + p.inner.Add(&aa.inner, &bb.inner) + return p +} + +func (p *G2Elt) Sub(a, b kyber.Point) kyber.Point { return p.Add(a, new(G2Elt).Neg(b)) } + +func (p *G2Elt) Neg(a kyber.Point) kyber.Point { + p.Set(a) + p.inner.Neg() + return p +} + +func (p *G2Elt) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { + if q == nil { + q = new(G2Elt).Base() + } + ss, qq := s.(*Scalar), q.(*G2Elt) + p.inner.ScalarMult(&ss.inner, &qq.inner) + return p +} + +func (p *G2Elt) IsInCorrectGroup() bool { return p.inner.IsOnG2() } + +func (p *G2Elt) Hash(msg []byte) kyber.Point { p.inner.Hash(msg, nil); return p } +func (p *G2Elt) Hash2(msg, dst []byte) kyber.Point { p.inner.Hash(msg, dst); return p 
} diff --git a/pairing/bls12381/circl/group.go b/pairing/bls12381/circl/group.go new file mode 100644 index 000000000..8fa45dc09 --- /dev/null +++ b/pairing/bls12381/circl/group.go @@ -0,0 +1,23 @@ +package circl + +import ( + bls12381 "github.com/cloudflare/circl/ecc/bls12381" + "go.dedis.ch/kyber/v3" +) + +var ( + G1 kyber.Group = &groupBls{name: "bls12-381.G1", newPoint: func() kyber.Point { return new(G1Elt).Null() }} + G2 kyber.Group = &groupBls{name: "bls12-381.G2", newPoint: func() kyber.Point { return new(G2Elt).Null() }} + GT kyber.Group = &groupBls{name: "bls12-381.GT", newPoint: func() kyber.Point { return new(GTElt).Null() }} +) + +type groupBls struct { + name string + newPoint func() kyber.Point +} + +func (g groupBls) String() string { return g.name } +func (g groupBls) ScalarLen() int { return bls12381.ScalarSize } +func (g groupBls) Scalar() kyber.Scalar { return new(Scalar).SetInt64(0) } +func (g groupBls) PointLen() int { return g.newPoint().MarshalSize() } +func (g groupBls) Point() kyber.Point { return g.newPoint() } diff --git a/pairing/bls12381/circl/gt.go b/pairing/bls12381/circl/gt.go new file mode 100644 index 000000000..d6f37d14a --- /dev/null +++ b/pairing/bls12381/circl/gt.go @@ -0,0 +1,92 @@ +package circl + +import ( + "crypto/cipher" + "io" + + bls12381 "github.com/cloudflare/circl/ecc/bls12381" + "go.dedis.ch/kyber/v3" +) + +var gtBase *bls12381.Gt + +func init() { + gtBase = bls12381.Pair(bls12381.G1Generator(), bls12381.G2Generator()) +} + +var _ kyber.Point = >Elt{} + +type GTElt struct{ inner bls12381.Gt } + +func (p *GTElt) MarshalBinary() (data []byte, err error) { return p.inner.MarshalBinary() } + +func (p *GTElt) UnmarshalBinary(data []byte) error { return p.inner.UnmarshalBinary(data) } + +func (p *GTElt) String() string { return p.inner.String() } + +func (p *GTElt) MarshalSize() int { return bls12381.GtSize } + +func (p *GTElt) MarshalTo(w io.Writer) (int, error) { + buf, err := p.MarshalBinary() + if err != nil { + return 0, err + } + return w.Write(buf) +} + +func (p *GTElt) UnmarshalFrom(r io.Reader) (int, error) { + buf := make([]byte, p.MarshalSize()) + n, err := io.ReadFull(r, buf) + if err != nil { + return n, err + } + return n, p.UnmarshalBinary(buf) +} + +func (p *GTElt) Equal(p2 kyber.Point) bool { x := p2.(*GTElt); return p.inner.IsEqual(&x.inner) } + +func (p *GTElt) Null() kyber.Point { p.inner.SetIdentity(); return p } + +func (p *GTElt) Base() kyber.Point { p.inner = *gtBase; return p } + +func (p *GTElt) Pick(rand cipher.Stream) kyber.Point { + panic("bls12-381: unsupported operation") +} + +func (p *GTElt) Set(p2 kyber.Point) kyber.Point { p.inner = p2.(*GTElt).inner; return p } + +func (p *GTElt) Clone() kyber.Point { return new(GTElt).Set(p) } + +func (p *GTElt) EmbedLen() int { + panic("bls12-381: unsupported operation") +} + +func (p *GTElt) Embed(data []byte, r cipher.Stream) kyber.Point { + panic("bls12-381: unsupported operation") +} + +func (p *GTElt) Data() ([]byte, error) { + panic("bls12-381: unsupported operation") +} + +func (p *GTElt) Add(a, b kyber.Point) kyber.Point { + aa, bb := a.(*GTElt), b.(*GTElt) + p.inner.Mul(&aa.inner, &bb.inner) + return p +} + +func (p *GTElt) Sub(a, b kyber.Point) kyber.Point { + return p.Add(a, new(GTElt).Neg(b)) +} + +func (p *GTElt) Neg(a kyber.Point) kyber.Point { + aa := a.(*GTElt) + p.inner.Inv(&aa.inner) + return p +} + +func (p *GTElt) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { + qq, ss := q.(*GTElt), s.(*Scalar) + p.inner.Exp(&qq.inner, &ss.inner) + return p +} diff 
--git a/pairing/bls12381/circl/scalar.go b/pairing/bls12381/circl/scalar.go new file mode 100644 index 000000000..01f0b81fa --- /dev/null +++ b/pairing/bls12381/circl/scalar.go @@ -0,0 +1,117 @@ +package circl + +import ( + "crypto/cipher" + "io" + + bls12381 "github.com/cloudflare/circl/ecc/bls12381" + "go.dedis.ch/kyber/v3" +) + +var _ kyber.Scalar = &Scalar{} + +type Scalar struct{ inner bls12381.Scalar } + +func (s *Scalar) MarshalBinary() (data []byte, err error) { return s.inner.MarshalBinary() } + +func (s *Scalar) UnmarshalBinary(data []byte) error { return s.inner.UnmarshalBinary(data) } + +func (s *Scalar) String() string { return s.inner.String() } + +func (s *Scalar) MarshalSize() int { return bls12381.ScalarSize } + +func (s *Scalar) MarshalTo(w io.Writer) (int, error) { + buf, err := s.inner.MarshalBinary() + if err != nil { + return 0, err + } + return w.Write(buf) +} + +func (s *Scalar) UnmarshalFrom(r io.Reader) (int, error) { + buf := make([]byte, s.MarshalSize()) + n, err := io.ReadFull(r, buf) + if err != nil { + return n, err + } + return n, s.inner.UnmarshalBinary(buf) +} + +func (s *Scalar) Equal(s2 kyber.Scalar) bool { + x := s2.(*Scalar) + return s.inner.IsEqual(&x.inner) == 1 +} + +func (s *Scalar) Set(a kyber.Scalar) kyber.Scalar { + aa := a.(*Scalar) + s.inner.Set(&aa.inner) + return s +} + +func (s *Scalar) Clone() kyber.Scalar { return new(Scalar).Set(s) } + +func (s *Scalar) SetInt64(v int64) kyber.Scalar { + if v >= 0 { + s.inner.SetUint64(uint64(v)) + } else { + s.inner.SetUint64(uint64(-v)) + s.inner.Neg() + } + + return s +} + +func (s *Scalar) Zero() kyber.Scalar { s.inner.SetUint64(0); return s } + +func (s *Scalar) Add(a, b kyber.Scalar) kyber.Scalar { + aa, bb := a.(*Scalar), b.(*Scalar) + s.inner.Add(&aa.inner, &bb.inner) + return s +} + +func (s *Scalar) Sub(a, b kyber.Scalar) kyber.Scalar { + aa, bb := a.(*Scalar), b.(*Scalar) + s.inner.Sub(&aa.inner, &bb.inner) + return s +} + +func (s *Scalar) Neg(a kyber.Scalar) kyber.Scalar { + s.Set(a) + s.inner.Neg() + return s +} + +func (s *Scalar) One() kyber.Scalar { s.inner.SetUint64(1); return s } + +func (s *Scalar) Mul(a, b kyber.Scalar) kyber.Scalar { + aa, bb := a.(*Scalar), b.(*Scalar) + s.inner.Mul(&aa.inner, &bb.inner) + return s +} + +func (s *Scalar) Div(a, b kyber.Scalar) kyber.Scalar { return s.Mul(new(Scalar).Inv(b), a) } + +func (s *Scalar) Inv(a kyber.Scalar) kyber.Scalar { + aa := a.(*Scalar) + s.inner.Inv(&aa.inner) + return s +} + +type zeroReader struct{} + +func (zeroReader) Read(p []byte) (n int, err error) { + for i := range p { + p[i] = 0 + } + return len(p), nil +} + +func (s *Scalar) Pick(stream cipher.Stream) kyber.Scalar { + err := s.inner.Random(cipher.StreamReader{S: stream, R: zeroReader{}}) + if err != nil { + panic(err) + } + return s +} + +func (s *Scalar) SetBytes(data []byte) kyber.Scalar { s.inner.SetBytes(data); return s } diff --git a/pairing/bls12381/circl/suite.go b/pairing/bls12381/circl/suite.go new file mode 100644 index 000000000..0104cab0f --- /dev/null +++ b/pairing/bls12381/circl/suite.go @@ -0,0 +1,59 @@ +package circl + +import ( + "crypto/cipher" + "crypto/sha256" + "hash" + "io" + + bls12381 "github.com/cloudflare/circl/ecc/bls12381" + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/pairing" + "go.dedis.ch/kyber/v3/util/random" + "go.dedis.ch/kyber/v3/xof/blake2xb" +) + +var _ pairing.Suite = Suite{} + +type Suite struct{} + +func NewSuite() (s Suite) { return } + +func (s Suite) String() string { return "bls12381" } +func (s Suite) G1() kyber.Group { 
return G1 } +func (s Suite) G2() kyber.Group { return G2 } +func (s Suite) GT() kyber.Group { return GT } +func (s Suite) Pair(p1, p2 kyber.Point) kyber.Point { + aa, bb := p1.(*G1Elt), p2.(*G2Elt) + return >Elt{*bls12381.Pair(&aa.inner, &bb.inner)} +} +func (s Suite) ValidatePairing(p1, p2, p3, p4 kyber.Point) bool { + a, b := p1.(*G1Elt), p2.(*G2Elt) + c, d := p3.(*G1Elt), p4.(*G2Elt) + out := bls12381.ProdPairFrac( + []*bls12381.G1{&a.inner, &c.inner}, + []*bls12381.G2{&b.inner, &d.inner}, + []int{1, -1}, + ) + return out.IsIdentity() +} + +func (s Suite) Read(r io.Reader, objs ...interface{}) error { + panic("Suite.Read(): deprecated in kyber") +} + +func (s Suite) Write(w io.Writer, objs ...interface{}) error { + panic("Suite.Write(): deprecated in kyber") +} + +func (s Suite) Hash() hash.Hash { + return sha256.New() +} + +func (s Suite) XOF(seed []byte) kyber.XOF { + return blake2xb.New(seed) +} + +func (s Suite) RandomStream() cipher.Stream { + return random.New() +} diff --git a/pairing/bls12381/suite_test.go b/pairing/bls12381/circl/suite_test.go similarity index 80% rename from pairing/bls12381/suite_test.go rename to pairing/bls12381/circl/suite_test.go index c64c4d1e0..ec70260ba 100644 --- a/pairing/bls12381/suite_test.go +++ b/pairing/bls12381/circl/suite_test.go @@ -1,18 +1,19 @@ -package bls12381 +package circl import ( "bytes" "crypto/cipher" + "sync" "testing" "github.com/stretchr/testify/require" "go.dedis.ch/kyber/v3" - + "go.dedis.ch/kyber/v3/sign/bls" + "go.dedis.ch/kyber/v3/sign/tbls" + "go.dedis.ch/kyber/v3/sign/test" "go.dedis.ch/kyber/v3/util/random" ) -// Code extracted from kyber/utils/test -// TODO: expose API in forked drand/kyber // Apply a generic set of validation tests to a cryptographic Group, // using a given source of [pseudo-]randomness. // @@ -72,7 +73,14 @@ func testGroup(t *testing.T, g kyber.Group, rand cipher.Stream) []kyber.Point { } // Verify additive and multiplicative identities of the generator. 
+ // TODO: Check GT exp + /*fmt.Println("Inverse of base")*/ + //f := ptmp.Base().(*KyberGT).f + //newFp12(nil).inverse(f, f) + //fmt.Printf("\n-Inverse: %v\n", f) + //fmt.Println("Multiply by -1") ptmp.Mul(stmp.SetInt64(-1), nil).Add(ptmp, gen) + /*fmt.Printf(" \n\nChecking equality additive identity\nptmp: %v \n\n zero %v\n", ptmp, pzero)*/ if !ptmp.Equal(pzero) { t.Fatalf("generator additive identity doesn't work: (scalar -1 %v) %v (x) -1 (+) %v = %v != %v the group point identity", stmp.SetInt64(-1), ptmp.Mul(stmp.SetInt64(-1), nil), gen, ptmp.Mul(stmp.SetInt64(-1), nil).Add(ptmp, gen), pzero) @@ -113,6 +121,7 @@ func testGroup(t *testing.T, g kyber.Group, rand cipher.Stream) []kyber.Point { } // Zero and One identity secrets + //println("dh1^0 = ",ptmp.Mul(dh1, szero).String()) if !ptmp.Mul(szero, dh1).Equal(pzero) { t.Fatalf("Encryption with secret=0 didn't work: %v (x) %v == %v != %v", szero, dh1, ptmp, pzero) } @@ -169,6 +178,11 @@ func testGroup(t *testing.T, g kyber.Group, rand cipher.Stream) []kyber.Point { pick := func(rand cipher.Stream) (p kyber.Point) { defer func() { + /*if err := recover(); err != nil {*/ + //// TODO implement Pick for GT + //p = g.Point().Mul(g.Scalar().Pick(rand), nil) + //return + /*}*/ }() p = g.Point().Pick(rand) return @@ -246,16 +260,16 @@ func GroupTest(t *testing.T, g kyber.Group) { testGroup(t, g, random.New()) } -func TestG1(t *testing.T) { - GroupTest(t, newGroupG1()) +func TestKyberG1(t *testing.T) { + GroupTest(t, G1) } -func TestG2(t *testing.T) { - GroupTest(t, newGroupG2()) +func TestKyberG2(t *testing.T) { + GroupTest(t, G2) } -func TestPairingG2(t *testing.T) { - s := NewSuite() +func TestKyberPairingG2(t *testing.T) { + s := Suite{} a := s.G1().Scalar().Pick(s.RandomStream()) b := s.G2().Scalar().Pick(s.RandomStream()) aG := s.G1().Point().Mul(a, nil) @@ -275,8 +289,26 @@ func TestPairingG2(t *testing.T) { require.False(t, p1.Equal(pRandom)) } +func TestRacePairings(t *testing.T) { + s := Suite{} + a := s.G1().Scalar().Pick(s.RandomStream()) + aG := s.G1().Point().Mul(a, nil) + B := s.G2().Point().Pick(s.RandomStream()) + aB := s.G2().Point().Mul(a, B.Clone()) + wg := sync.WaitGroup{} + for i := 0; i < 10; i++ { + wg.Add(1) + go func() { + // e(p1,p2) =?= e(inv1^-1, inv2^-1) + s.ValidatePairing(aG, B, s.G1().Point(), aB) + wg.Done() + }() + } + wg.Wait() +} + func BenchmarkPairingSeparate(bb *testing.B) { - s := NewSuite() + s := Suite{} a := s.G1().Scalar().Pick(s.RandomStream()) b := s.G2().Scalar().Pick(s.RandomStream()) aG := s.G1().Point().Mul(a, nil) @@ -297,7 +329,7 @@ func BenchmarkPairingSeparate(bb *testing.B) { } func BenchmarkPairingInv(bb *testing.B) { - s := NewSuite() + s := Suite{} a := s.G1().Scalar().Pick(s.RandomStream()) b := s.G2().Scalar().Pick(s.RandomStream()) aG := s.G1().Point().Mul(a, nil) @@ -312,16 +344,40 @@ func BenchmarkPairingInv(bb *testing.B) { } } +func TestKyberBLSG2(t *testing.T) { + suite := Suite{} + scheme := bls.NewSchemeOnG2(suite) + test.SchemeTesting(t, scheme) +} + +func TestKyberBLSG1(t *testing.T) { + suite := Suite{} + scheme := bls.NewSchemeOnG1(suite) + test.SchemeTesting(t, scheme) +} + +func TestKyberThresholdG2(t *testing.T) { + suite := Suite{} + tscheme := tbls.NewThresholdSchemeOnG2(suite) + test.ThresholdTest(t, suite.G1(), tscheme) +} + +func TestKyberThresholdG1(t *testing.T) { + suite := Suite{} + tscheme := tbls.NewThresholdSchemeOnG1(suite) + test.ThresholdTest(t, suite.G2(), tscheme) +} + func TestIsValidGroup(t *testing.T) { - suite := NewSuite() + suite := Suite{} p1 := 
suite.G1().Point().Pick(random.New()) p2 := suite.G1().Point().Pick(random.New()) - require.True(t, p1.(GroupChecker).IsInCorrectGroup()) - require.True(t, p2.(GroupChecker).IsInCorrectGroup()) + require.True(t, p1.(kyber.SubGroupElement).IsInCorrectGroup()) + require.True(t, p2.(kyber.SubGroupElement).IsInCorrectGroup()) } -var suite = NewSuite() +var suite = Suite{} func NewElement() kyber.Scalar { return suite.G1().Scalar() @@ -361,6 +417,21 @@ func TestBasicPairing(t *testing.T) { dG := NewG2().Mul(d, nil) right2 := Pair(NewG1(), dG) right := suite.GT().Point().Add(right1, right2) + require.True(t, left.Equal(right)) + + // Test if addition works in GT + mright := right.Clone().Neg(right) + res := mright.Add(mright, right) + require.True(t, res.Equal(suite.GT().Point().Null())) + + // Test if Sub works in GT + expZero := right.Clone().Sub(right, right) + require.True(t, expZero.Equal(suite.GT().Point().Null())) + // Test if scalar mul works in GT + // e(aG,G) == e(G,G)^a + left = Pair(aG, suite.G2().Point().Base()) + right = Pair(suite.G1().Point().Base(), suite.G2().Point().Base()) + right = right.Mul(a, right) require.True(t, left.Equal(right)) } diff --git a/pairing/bls12381/group.go b/pairing/bls12381/group.go deleted file mode 100644 index b088b2e53..000000000 --- a/pairing/bls12381/group.go +++ /dev/null @@ -1,114 +0,0 @@ -package bls12381 - -import ( - "crypto/cipher" - "crypto/sha256" - "hash" - - "go.dedis.ch/kyber/v3" - "go.dedis.ch/kyber/v3/util/random" - "go.dedis.ch/kyber/v3/xof/blake2xb" -) - -// GroupChecker allows to verify if a Point is in the correct group or not. For -// curves which don't have a prime order, we need to only consider the points -// lying in the subgroup of prime order. That check returns true if the point is -// correct or not. -type GroupChecker interface { - kyber.Point - IsInCorrectGroup() bool -} - -type common struct { - isPrime bool -} - -func (g *common) Scalar() kyber.Scalar { - return NewKyberScalar() -} - -func (g *common) ScalarLen() int { - return g.Scalar().MarshalSize() -} - -func (g *common) IsPrimeOrder() bool { - return g.isPrime -} - -func (g *common) Hash() hash.Hash { - return sha256.New() -} - -// XOF returns a newlly instantiated blake2xb XOF function. -func (g *common) XOF(seed []byte) kyber.XOF { - return blake2xb.New(seed) -} - -// RandomStream returns a cipher.Stream which corresponds to a key stream from -// crypto/rand. 
-func (g *common) RandomStream() cipher.Stream { - return random.New() -} - -type groupG1 struct { - common - *commonSuite -} - -func (g *groupG1) String() string { - return "bls12381.G1" -} - -func (g *groupG1) Point() kyber.Point { - return nullpointG1() -} - -func (g *groupG1) PointLen() int { - return g.Point().MarshalSize() -} - -type groupG2 struct { - common - *commonSuite -} - -func (g *groupG2) String() string { - return "bls12381.G2" -} - -func (g *groupG2) Point() kyber.Point { - return nullpointG2() -} - -func (g *groupG2) PointLen() int { - return g.Point().MarshalSize() -} - -type groupGT struct { - common - *commonSuite -} - -func (g *groupGT) String() string { - return "bls12381.GT" -} - -func (g *groupGT) Point() kyber.Point { - return newEmptyGT() -} - -func (g *groupGT) PointLen() int { - return g.Point().MarshalSize() -} - -func newGroupG1() kyber.Group { - return &groupG1{ - common: common{isPrime: true}, - } -} - -func newGroupG2() kyber.Group { - return &groupG2{ - common: common{isPrime: false}, - } -} diff --git a/pairing/bls12381/kilic/adapter.go b/pairing/bls12381/kilic/adapter.go new file mode 100644 index 000000000..6fdf21115 --- /dev/null +++ b/pairing/bls12381/kilic/adapter.go @@ -0,0 +1,48 @@ +package kilic + +import ( + "go.dedis.ch/kyber/v3" +) + +// SuiteBLS12381 is an adapter that implements the suites.Suite interface so that +// bls12381 can be used as a common suite to generate key pairs for instance but +// still preserves the properties of the pairing (e.g. the Pair function). +// +// It's important to note that the Point function will generate a point +// compatible with public keys only (group G2) where the signature must be +// used as a point from the group G1. +type SuiteBLS12381 struct { + Suite + kyber.Group +} + +// NewSuiteBLS12381 makes a new BN256 suite +func NewSuiteBLS12381() *SuiteBLS12381 { + return &SuiteBLS12381{} +} + +// Point generates a point from the G2 group that can only be used +// for public keys +func (s *SuiteBLS12381) Point() kyber.Point { + return s.G2().Point() +} + +// PointLen returns the length of a G2 point +func (s *SuiteBLS12381) PointLen() int { + return s.G2().PointLen() +} + +// Scalar generates a scalar +func (s *SuiteBLS12381) Scalar() kyber.Scalar { + return s.G1().Scalar() +} + +// ScalarLen returns the lenght of a scalar +func (s *SuiteBLS12381) ScalarLen() int { + return s.G1().ScalarLen() +} + +// String returns the name of the suite +func (s *SuiteBLS12381) String() string { + return "kilic.adapter" +} diff --git a/pairing/bls12381/kilic/adapter_test.go b/pairing/bls12381/kilic/adapter_test.go new file mode 100644 index 000000000..12fe8457b --- /dev/null +++ b/pairing/bls12381/kilic/adapter_test.go @@ -0,0 +1,28 @@ +package kilic + +import ( + "testing" + + "github.com/stretchr/testify/require" + "go.dedis.ch/kyber/v3/util/key" +) + +func TestAdapter_SuiteBLS12381(t *testing.T) { + suite := NewSuiteBLS12381() + + pair := key.NewKeyPair(suite) + pubkey, err := pair.Public.MarshalBinary() + require.Nil(t, err) + privkey, err := pair.Private.MarshalBinary() + require.Nil(t, err) + + pubhex := suite.Point() + err = pubhex.UnmarshalBinary(pubkey) + require.Nil(t, err) + + privhex := suite.Scalar() + err = privhex.UnmarshalBinary(privkey) + require.Nil(t, err) + + require.Equal(t, "kilic.adapter", suite.String()) +} diff --git a/pairing/bls12381/kilic/g1.go b/pairing/bls12381/kilic/g1.go new file mode 100644 index 000000000..796470772 --- /dev/null +++ b/pairing/bls12381/kilic/g1.go @@ -0,0 +1,172 @@ +package 
kilic + +import ( + "bytes" + "crypto/cipher" + "encoding/hex" + "io" + + bls12381 "github.com/kilic/bls12-381" + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/group/mod" +) + +// domainG1 is the DST used for hash to curve on G1, this is the default from the RFC. +var domainG1 = []byte("BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_NUL_") + +func DefaultDomainG1() []byte { + return domainG1 +} + +// G1Elt is a kyber.Point holding a G1 point on BLS12-381 curve +type G1Elt struct { + p *bls12381.PointG1 + // domain separation tag. We treat a 0 len dst as the default value as per the RFC "Tags MUST have nonzero length" + dst []byte + + kyber.Point + kyber.HashablePoint +} + +func NullG1(dst ...byte) *G1Elt { + var p bls12381.PointG1 + return newG1(&p, dst) +} +func newG1(p *bls12381.PointG1, dst []byte) *G1Elt { + domain := dst + if bytes.Equal(dst, domainG1) { + domain = nil + } + return &G1Elt{p: p, dst: domain} +} + +func (k *G1Elt) Equal(k2 kyber.Point) bool { + k2g1, ok := k2.(*G1Elt) + if !ok { + return false + } + return bls12381.NewG1().Equal(k.p, k2g1.p) && bytes.Equal(k.dst, k2g1.dst) +} + +func (k *G1Elt) Null() kyber.Point { + return newG1(bls12381.NewG1().Zero(), k.dst) +} + +func (k *G1Elt) Base() kyber.Point { + return newG1(bls12381.NewG1().One(), k.dst) +} + +func (k *G1Elt) Pick(rand cipher.Stream) kyber.Point { + var dst, src [32]byte + rand.XORKeyStream(dst[:], src[:]) + return k.Hash(dst[:]) +} + +func (k *G1Elt) Set(q kyber.Point) kyber.Point { + k.p.Set(q.(*G1Elt).p) + return k +} + +func (k *G1Elt) Clone() kyber.Point { + var p bls12381.PointG1 + p.Set(k.p) + return newG1(&p, k.dst) +} + +func (k *G1Elt) EmbedLen() int { + panic("bls12-381: unsupported operation") +} + +func (k *G1Elt) Embed(data []byte, rand cipher.Stream) kyber.Point { + panic("bls12-381: unsupported operation") +} + +func (k *G1Elt) Data() ([]byte, error) { + panic("bls12-381: unsupported operation") +} + +func (k *G1Elt) Add(a, b kyber.Point) kyber.Point { + aa := a.(*G1Elt) + bb := b.(*G1Elt) + bls12381.NewG1().Add(k.p, aa.p, bb.p) + return k +} + +func (k *G1Elt) Sub(a, b kyber.Point) kyber.Point { + aa := a.(*G1Elt) + bb := b.(*G1Elt) + bls12381.NewG1().Sub(k.p, aa.p, bb.p) + return k +} + +func (k *G1Elt) Neg(a kyber.Point) kyber.Point { + aa := a.(*G1Elt) + bls12381.NewG1().Neg(k.p, aa.p) + return k +} + +func (k *G1Elt) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { + if q == nil { + q = NullG1(k.dst...).Base() + } + bls12381.NewG1().MulScalarBig(k.p, q.(*G1Elt).p, &s.(*mod.Int).V) + return k +} + +// MarshalBinary returns a compressed point, without any domain separation tag information +func (k *G1Elt) MarshalBinary() ([]byte, error) { + // we need to clone the point because of https://github.com/kilic/bls12-381/issues/37 + // in order to avoid risks of race conditions. + t := new(bls12381.PointG1).Set(k.p) + return bls12381.NewG1().ToCompressed(t), nil +} + +// UnmarshalBinary populates the point from a compressed point representation. +func (k *G1Elt) UnmarshalBinary(buff []byte) error { + var err error + k.p, err = bls12381.NewG1().FromCompressed(buff) + return err +} + +// MarshalTo writes a compressed point to the Writer, without any domain separation tag information +func (k *G1Elt) MarshalTo(w io.Writer) (int, error) { + buf, err := k.MarshalBinary() + if err != nil { + return 0, err + } + return w.Write(buf) +} + +// UnmarshalFrom populates the point from a compressed point representation read from the Reader. 
+func (k *G1Elt) UnmarshalFrom(r io.Reader) (int, error) { + buf := make([]byte, k.MarshalSize()) + n, err := io.ReadFull(r, buf) + if err != nil { + return n, err + } + return n, k.UnmarshalBinary(buf) +} + +func (k *G1Elt) MarshalSize() int { + return 48 +} + +func (k *G1Elt) String() string { + b, _ := k.MarshalBinary() + return "bls12-381.G1: " + hex.EncodeToString(b) +} + +func (k *G1Elt) Hash(m []byte) kyber.Point { + domain := domainG1 + // We treat a 0 len dst as the default value as per the RFC "Tags MUST have nonzero length" + if len(k.dst) != 0 { + domain = k.dst + } + p, _ := bls12381.NewG1().HashToCurve(m, domain) + k.p = p + return k +} + +func (k *G1Elt) IsInCorrectGroup() bool { + return bls12381.NewG1().InCorrectSubgroup(k.p) +} diff --git a/pairing/bls12381/kilic/g2.go b/pairing/bls12381/kilic/g2.go new file mode 100644 index 000000000..8a7837ec0 --- /dev/null +++ b/pairing/bls12381/kilic/g2.go @@ -0,0 +1,171 @@ +package kilic + +import ( + "bytes" + "crypto/cipher" + "encoding/hex" + "io" + + bls12381 "github.com/kilic/bls12-381" + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/group/mod" +) + +// domainG2 is the DST used for hash to curve on G2, this is the default from the RFC. +// This is compatible with the paired library > v18 +var domainG2 = []byte("BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_NUL_") + +func DefaultDomainG2() []byte { + return domainG2 +} + +// G2Elt is a kyber.Point holding a G2 point on BLS12-381 curve +type G2Elt struct { + p *bls12381.PointG2 + // domain separation tag. We treat a 0 len dst as the default value as per the RFC "Tags MUST have nonzero length" + dst []byte +} + +func NullG2(dst ...byte) *G2Elt { + var p bls12381.PointG2 + return newG2(&p, dst) +} + +func newG2(p *bls12381.PointG2, dst []byte) *G2Elt { + domain := dst + if bytes.Equal(dst, domainG2) { + domain = nil + } + return &G2Elt{p: p, dst: domain} +} + +func (k *G2Elt) Equal(k2 kyber.Point) bool { + k2g2, ok := k2.(*G2Elt) + if !ok { + return false + } + return bls12381.NewG2().Equal(k.p, k2g2.p) && bytes.Equal(k.dst, k2g2.dst) +} + +func (k *G2Elt) Null() kyber.Point { + return newG2(bls12381.NewG2().Zero(), k.dst) +} + +func (k *G2Elt) Base() kyber.Point { + return newG2(bls12381.NewG2().One(), k.dst) +} + +func (k *G2Elt) Pick(rand cipher.Stream) kyber.Point { + var dst, src [32]byte + rand.XORKeyStream(dst[:], src[:]) + return k.Hash(dst[:]) +} + +func (k *G2Elt) Set(q kyber.Point) kyber.Point { + k.p.Set(q.(*G2Elt).p) + return k +} + +func (k *G2Elt) Clone() kyber.Point { + var p bls12381.PointG2 + p.Set(k.p) + return newG2(&p, k.dst) +} + +func (k *G2Elt) EmbedLen() int { + panic("bls12-381: unsupported operation") +} + +func (k *G2Elt) Embed(data []byte, rand cipher.Stream) kyber.Point { + panic("bls12-381: unsupported operation") +} + +func (k *G2Elt) Data() ([]byte, error) { + panic("bls12-381: unsupported operation") +} + +func (k *G2Elt) Add(a, b kyber.Point) kyber.Point { + aa := a.(*G2Elt) + bb := b.(*G2Elt) + bls12381.NewG2().Add(k.p, aa.p, bb.p) + return k +} + +func (k *G2Elt) Sub(a, b kyber.Point) kyber.Point { + aa := a.(*G2Elt) + bb := b.(*G2Elt) + bls12381.NewG2().Sub(k.p, aa.p, bb.p) + return k +} + +func (k *G2Elt) Neg(a kyber.Point) kyber.Point { + aa := a.(*G2Elt) + bls12381.NewG2().Neg(k.p, aa.p) + return k +} + +func (k *G2Elt) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { + if q == nil { + q = NullG2(k.dst...).Base() + } + bls12381.NewG2().MulScalarBig(k.p, q.(*G2Elt).p, &s.(*mod.Int).V) + return k +} + +// MarshalBinary returns a compressed point, 
without any domain separation tag information +func (k *G2Elt) MarshalBinary() ([]byte, error) { + // we need to clone the point because of https://github.com/kilic/bls12-381/issues/37 + // in order to avoid risks of race conditions. + t := new(bls12381.PointG2).Set(k.p) + return bls12381.NewG2().ToCompressed(t), nil +} + +// UnmarshalBinary populates the point from a compressed point representation. +func (k *G2Elt) UnmarshalBinary(buff []byte) error { + var err error + k.p, err = bls12381.NewG2().FromCompressed(buff) + return err +} + +// MarshalTo writes a compressed point to the Writer, without any domain separation tag information +func (k *G2Elt) MarshalTo(w io.Writer) (int, error) { + buf, err := k.MarshalBinary() + if err != nil { + return 0, err + } + return w.Write(buf) +} + +// UnmarshalFrom populates the point from a compressed point representation read from the Reader. +func (k *G2Elt) UnmarshalFrom(r io.Reader) (int, error) { + buf := make([]byte, k.MarshalSize()) + n, err := io.ReadFull(r, buf) + if err != nil { + return n, err + } + return n, k.UnmarshalBinary(buf) +} + +func (k *G2Elt) MarshalSize() int { + return 96 +} + +func (k *G2Elt) String() string { + b, _ := k.MarshalBinary() + return "bls12-381.G2: " + hex.EncodeToString(b) +} + +func (k *G2Elt) Hash(m []byte) kyber.Point { + domain := domainG2 + // We treat a 0 len dst as the default value as per the RFC "Tags MUST have nonzero length" + if len(k.dst) != 0 { + domain = k.dst + } + pg2, _ := bls12381.NewG2().HashToCurve(m, domain) + k.p = pg2 + return k +} + +func (k *G2Elt) IsInCorrectGroup() bool { + return bls12381.NewG2().InCorrectSubgroup(k.p) +} diff --git a/pairing/bls12381/kilic/group.go b/pairing/bls12381/kilic/group.go new file mode 100644 index 000000000..9f2a3060f --- /dev/null +++ b/pairing/bls12381/kilic/group.go @@ -0,0 +1,89 @@ +package kilic + +import ( + "crypto/cipher" + "crypto/sha256" + "hash" + + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/util/random" + "go.dedis.ch/kyber/v3/xof/blake2xb" +) + +// GroupChecker allows to verify if a Point is in the correct group or not. For +// curves which don't have a prime order, we need to only consider the points +// lying in the subgroup of prime order. That check returns true if the point is +// correct or not. +type GroupChecker interface { + kyber.Point + IsInCorrectGroup() bool +} + +type groupBls struct { + str string + newPoint func() kyber.Point + isPrime bool +} + +func (g *groupBls) String() string { + return g.str +} + +func (g *groupBls) Scalar() kyber.Scalar { + return NewScalar() +} + +func (g *groupBls) ScalarLen() int { + return g.Scalar().MarshalSize() +} + +func (g *groupBls) PointLen() int { + return g.Point().MarshalSize() +} + +func (g *groupBls) Point() kyber.Point { + return g.newPoint() +} + +func (g *groupBls) IsPrimeOrder() bool { + return g.isPrime +} + +func (g *groupBls) Hash() hash.Hash { + return sha256.New() +} + +// XOF returns a newly instantiated blake2xb XOF function. +func (g *groupBls) XOF(seed []byte) kyber.XOF { + return blake2xb.New(seed) +} + +// RandomStream returns a cipher.Stream which corresponds to a key stream from +// crypto/rand. +func (g *groupBls) RandomStream() cipher.Stream { + return random.New() +} + +func NewGroupG1(dst ...byte) kyber.Group { + return &groupBls{ + str: "bls12-381.G1", + newPoint: func() kyber.Point { return NullG1(dst...) 
}, + isPrime: true, + } +} + +func NewGroupG2(dst ...byte) kyber.Group { + return &groupBls{ + str: "bls12-381.G2", + newPoint: func() kyber.Point { return NullG2(dst...) }, + isPrime: false, + } +} + +func NewGroupGT() kyber.Group { + return &groupBls{ + str: "bls12-381.GT", + newPoint: func() kyber.Point { return newEmptyGT() }, + isPrime: false, + } +} diff --git a/pairing/bls12381/kilic/gt.go b/pairing/bls12381/kilic/gt.go new file mode 100644 index 000000000..4e8a6f523 --- /dev/null +++ b/pairing/bls12381/kilic/gt.go @@ -0,0 +1,127 @@ +package kilic + +import ( + "crypto/cipher" + "encoding/hex" + "io" + + bls12381 "github.com/kilic/bls12-381" + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/group/mod" +) + +type GTElt struct { + f *bls12381.E +} + +func newEmptyGT() *GTElt { + return newGT(bls12381.NewGT().New()) +} +func newGT(f *bls12381.E) *GTElt { + return >Elt{ + f: f, + } +} + +func (k *GTElt) Equal(kk kyber.Point) bool { + return k.f.Equal(kk.(*GTElt).f) +} + +func (k *GTElt) Null() kyber.Point { + // One since we deal with Gt elements as a multiplicative group only + // i.e. Add in kyber -> mul in kilic/, Neg in kyber -> inverse in kilic/ etc + k.f = bls12381.NewGT().New() + return k +} + +func (k *GTElt) Base() kyber.Point { + panic("bls12-381.GT.Base(): unsupported operation") +} + +func (k *GTElt) Pick(rand cipher.Stream) kyber.Point { + panic("bls12-381.GT.Pick(): unsupported operation") +} + +func (k *GTElt) Set(q kyber.Point) kyber.Point { + k.f.Set(q.(*GTElt).f) + return k +} + +func (k *GTElt) Clone() kyber.Point { + kk := newEmptyGT() + kk.Set(k) + return kk +} + +func (k *GTElt) Add(a, b kyber.Point) kyber.Point { + aa := a.(*GTElt) + bb := b.(*GTElt) + bls12381.NewGT().Mul(k.f, aa.f, bb.f) + return k +} + +func (k *GTElt) Sub(a, b kyber.Point) kyber.Point { + nb := newEmptyGT().Neg(b) + return newEmptyGT().Add(a, nb) +} + +func (k *GTElt) Neg(q kyber.Point) kyber.Point { + qq := q.(*GTElt) + bls12381.NewGT().Inverse(k.f, qq.f) + return k +} + +func (k *GTElt) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { + v := s.(*mod.Int).V + qq := q.(*GTElt) + bls12381.NewGT().Exp(k.f, qq.f, &v) + return k +} + +func (k *GTElt) MarshalBinary() ([]byte, error) { + return bls12381.NewGT().ToBytes(k.f), nil +} + +func (k *GTElt) MarshalTo(w io.Writer) (int, error) { + buf, err := k.MarshalBinary() + if err != nil { + return 0, err + } + return w.Write(buf) +} + +func (k *GTElt) UnmarshalBinary(buf []byte) error { + fe12, err := bls12381.NewGT().FromBytes(buf) + k.f = fe12 + return err +} + +func (k *GTElt) UnmarshalFrom(r io.Reader) (int, error) { + buf := make([]byte, k.MarshalSize()) + n, err := io.ReadFull(r, buf) + if err != nil { + return n, err + } + return n, k.UnmarshalBinary(buf) +} + +func (k *GTElt) MarshalSize() int { + return 576 +} + +func (k *GTElt) String() string { + b, _ := k.MarshalBinary() + return "bls12-381.GT: " + hex.EncodeToString(b) +} + +func (k *GTElt) EmbedLen() int { + panic("bls12-381.GT.EmbedLen(): unsupported operation") +} + +func (k *GTElt) Embed(data []byte, rand cipher.Stream) kyber.Point { + panic("bls12-381.GT.Embed(): unsupported operation") +} + +func (k *GTElt) Data() ([]byte, error) { + panic("bls12-381.GT.Data(): unsupported operation") +} diff --git a/pairing/bls12381/scalar.go b/pairing/bls12381/kilic/scalar.go similarity index 70% rename from pairing/bls12381/scalar.go rename to pairing/bls12381/kilic/scalar.go index 960371145..c39a3dd93 100644 --- a/pairing/bls12381/scalar.go +++ b/pairing/bls12381/kilic/scalar.go @@ -1,4 +1,4 @@ 
-package bls12381 +package kilic import ( "math/big" @@ -9,7 +9,6 @@ import ( var curveOrder, _ = new(big.Int).SetString("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16) -// NewKyberScalar returns a new scalar value -func NewKyberScalar() kyber.Scalar { +func NewScalar() kyber.Scalar { return mod.NewInt64(0, curveOrder) } diff --git a/pairing/bls12381/kilic/suite.go b/pairing/bls12381/kilic/suite.go new file mode 100644 index 000000000..441ab93e3 --- /dev/null +++ b/pairing/bls12381/kilic/suite.go @@ -0,0 +1,104 @@ +package kilic + +import ( + "crypto/cipher" + "crypto/sha256" + "hash" + "io" + "reflect" + + bls12381 "github.com/kilic/bls12-381" + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/pairing" + "go.dedis.ch/kyber/v3/util/random" + "go.dedis.ch/kyber/v3/xof/blake2xb" +) + +type Suite struct { + domainG1 []byte + domainG2 []byte +} + +// NewBLS12381Suite is the same as calling NewBLS12381SuiteWithDST(nil, nil): it uses the default domain separation +// tags for its Hash To Curve functions. +func NewBLS12381Suite() pairing.Suite { + return &Suite{} +} + +// NewBLS12381SuiteWithDST allows you to set your own domain separation tags to be used by the Hash To Curve functions. +// Since the DST shouldn't be 0 len, if you provide nil or a 0 len byte array, it will use the RFC default values. +func NewBLS12381SuiteWithDST(DomainG1, DomainG2 []byte) pairing.Suite { + return &Suite{domainG1: DomainG1, domainG2: DomainG2} +} + +func (s *Suite) SetDomainG1(dst []byte) { + s.domainG1 = dst +} + +func (s *Suite) G1() kyber.Group { + return NewGroupG1(s.domainG1...) +} + +func (s *Suite) SetDomainG2(dst []byte) { + s.domainG2 = dst +} + +func (s *Suite) G2() kyber.Group { + return NewGroupG2(s.domainG2...) +} + +func (s *Suite) GT() kyber.Group { + return NewGroupGT() +} + +// ValidatePairing implements the `pairing.Suite` interface +func (s *Suite) ValidatePairing(p1, p2, p3, p4 kyber.Point) bool { + e := bls12381.NewEngine() + // we need to clone the point because of https://github.com/kilic/bls12-381/issues/37 + // in order to avoid risks of race conditions. + g1point := new(bls12381.PointG1).Set(p1.(*G1Elt).p) + g2point := new(bls12381.PointG2).Set(p2.(*G2Elt).p) + g1point2 := new(bls12381.PointG1).Set(p3.(*G1Elt).p) + g2point2 := new(bls12381.PointG2).Set(p4.(*G2Elt).p) + e.AddPair(g1point, g2point) + e.AddPairInv(g1point2, g2point2) + return e.Check() +} + +func (s *Suite) Pair(p1, p2 kyber.Point) kyber.Point { + e := bls12381.NewEngine() + g1point := p1.(*G1Elt).p + g2point := p2.(*G2Elt).p + return newGT(e.AddPair(g1point, g2point).Result()) +} + +// New implements the kyber.Encoding interface. +func (s *Suite) New(t reflect.Type) interface{} { + panic("Suite.Encoding: deprecated in kyber") +} + +// Read is the default implementation of kyber.Encoding interface Read. +func (s *Suite) Read(r io.Reader, objs ...interface{}) error { + panic("Suite.Read(): deprecated in kyber") +} + +// Write is the default implementation of kyber.Encoding interface Write. +func (s *Suite) Write(w io.Writer, objs ...interface{}) error { + panic("Suite.Write(): deprecated in kyber") +} + +// Hash returns a newly instantiated sha256 hash function. +func (s *Suite) Hash() hash.Hash { + return sha256.New() +} + +// XOF returns a newly instantiated blake2xb XOF function. +func (s *Suite) XOF(seed []byte) kyber.XOF { + return blake2xb.New(seed) +} + +// RandomStream returns a cipher.Stream which corresponds to a key stream from +// crypto/rand. 
+func (s *Suite) RandomStream() cipher.Stream { + return random.New() +} diff --git a/pairing/bls12381/kilic/suite_test.go b/pairing/bls12381/kilic/suite_test.go new file mode 100644 index 000000000..8254c84d3 --- /dev/null +++ b/pairing/bls12381/kilic/suite_test.go @@ -0,0 +1,563 @@ +package kilic + +import ( + "bytes" + "crypto/cipher" + "crypto/sha256" + "encoding/binary" + "encoding/hex" + "sync" + "testing" + + "go.dedis.ch/kyber/v3/pairing" + + "github.com/stretchr/testify/require" + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/sign/bls" + "go.dedis.ch/kyber/v3/sign/tbls" + "go.dedis.ch/kyber/v3/sign/test" + "go.dedis.ch/kyber/v3/util/random" +) + +// Apply a generic set of validation tests to a cryptographic Group, +// using a given source of [pseudo-]randomness. +// +// Returns a log of the pseudorandom Points produced in the test, +// for comparison across alternative implementations +// that are supposed to be equivalent. +func testGroup(t *testing.T, g kyber.Group, rand cipher.Stream) []kyber.Point { + t.Logf("\nTesting group '%s': %d-byte Point, %d-byte Scalar\n", + g.String(), g.PointLen(), g.ScalarLen()) + + points := make([]kyber.Point, 0) + ptmp := g.Point() + stmp := g.Scalar() + pzero := g.Point().Null() + szero := g.Scalar().Zero() + sone := g.Scalar().One() + + // Do a simple Diffie-Hellman test + s1 := g.Scalar().Pick(rand) + s2 := g.Scalar().Pick(rand) + if s1.Equal(szero) { + t.Fatalf("first secret is scalar zero %v", s1) + } + if s2.Equal(szero) { + t.Fatalf("second secret is scalar zero %v", s2) + } + if s1.Equal(s2) { + t.Fatalf("not getting unique secrets: picked %s twice", s1) + } + + gen := g.Point().Base() + points = append(points, gen) + + // Sanity-check relationship between addition and multiplication + p1 := g.Point().Add(gen, gen) + p2 := g.Point().Mul(stmp.SetInt64(2), nil) + if !p1.Equal(p2) { + t.Fatalf("multiply by two doesn't work: %v == %v (+) %[2]v != %[2]v (x) 2 == %v", p1, gen, p2) + } + p1.Add(p1, p1) + p2.Mul(stmp.SetInt64(4), nil) + if !p1.Equal(p2) { + t.Fatalf("multiply by four doesn't work: %v (+) %[1]v != %v (x) 4 == %v", + g.Point().Add(gen, gen), gen, p2) + } + points = append(points, p1) + + // Find out if this curve has a prime order: + // if the curve does not offer a method IsPrimeOrder, + // then assume that it is. + type canCheckPrimeOrder interface { + IsPrimeOrder() bool + } + primeOrder := true + if gpo, ok := g.(canCheckPrimeOrder); ok { + primeOrder = gpo.IsPrimeOrder() + } + + // Verify additive and multiplicative identities of the generator. 
+ // TODO: Check GT exp + /*fmt.Println("Inverse of base")*/ + //f := ptmp.Base().(*KyberGT).f + //newFp12(nil).inverse(f, f) + //fmt.Printf("\n-Inverse: %v\n", f) + //fmt.Println("Multiply by -1") + ptmp.Mul(stmp.SetInt64(-1), nil).Add(ptmp, gen) + /*fmt.Printf(" \n\nChecking equality additive identity\nptmp: %v \n\n zero %v\n", ptmp, pzero)*/ + if !ptmp.Equal(pzero) { + t.Fatalf("generator additive identity doesn't work: (scalar -1 %v) %v (x) -1 (+) %v = %v != %v the group point identity", + stmp.SetInt64(-1), ptmp.Mul(stmp.SetInt64(-1), nil), gen, ptmp.Mul(stmp.SetInt64(-1), nil).Add(ptmp, gen), pzero) + } + // secret.Inv works only in prime-order groups + if primeOrder { + ptmp.Mul(stmp.SetInt64(2), nil).Mul(stmp.Inv(stmp), ptmp) + if !ptmp.Equal(gen) { + t.Fatalf("generator multiplicative identity doesn't work:\n%v (x) %v = %v\n%[3]v (x) %v = %v", + ptmp.Base().String(), stmp.SetInt64(2).String(), + ptmp.Mul(stmp.SetInt64(2), nil).String(), + stmp.Inv(stmp).String(), + ptmp.Mul(stmp.SetInt64(2), nil).Mul(stmp.Inv(stmp), ptmp).String()) + } + } + + p1.Mul(s1, gen) + p2.Mul(s2, gen) + if p1.Equal(p2) { + t.Fatalf("encryption isn't producing unique points: %v (x) %v == %v (x) %[2]v == %[4]v", s1, gen, s2, p1) + } + points = append(points, p1) + + dh1 := g.Point().Mul(s2, p1) + dh2 := g.Point().Mul(s1, p2) + if !dh1.Equal(dh2) { + t.Fatalf("Diffie-Hellman didn't work: %v == %v (x) %v != %v (x) %v == %v", dh1, s2, p1, s1, p2, dh2) + } + points = append(points, dh1) + //t.Logf("shared secret = %v", dh1) + + // Test secret inverse to get from dh1 back to p1 + if primeOrder { + ptmp.Mul(g.Scalar().Inv(s2), dh1) + if !ptmp.Equal(p1) { + t.Fatalf("Scalar inverse didn't work: %v != (-)%v (x) %v == %v", p1, s2, dh1, ptmp) + } + } + + // Zero and One identity secrets + //println("dh1^0 = ",ptmp.Mul(dh1, szero).String()) + if !ptmp.Mul(szero, dh1).Equal(pzero) { + t.Fatalf("Encryption with secret=0 didn't work: %v (x) %v == %v != %v", szero, dh1, ptmp, pzero) + } + if !ptmp.Mul(sone, dh1).Equal(dh1) { + t.Fatalf("Encryption with secret=1 didn't work: %v (x) %v == %v != %[2]v", sone, dh1, ptmp) + } + + // Additive homomorphic identities + ptmp.Add(p1, p2) + stmp.Add(s1, s2) + pt2 := g.Point().Mul(stmp, gen) + if !pt2.Equal(ptmp) { + t.Fatalf("Additive homomorphism doesn't work: %v + %v == %v, %[3]v (x) %v == %v != %v == %v (+) %v", + s1, s2, stmp, gen, pt2, ptmp, p1, p2) + } + ptmp.Sub(p1, p2) + stmp.Sub(s1, s2) + pt2.Mul(stmp, gen) + if !pt2.Equal(ptmp) { + t.Fatalf("Additive homomorphism doesn't work: %v - %v == %v, %[3]v (x) %v == %v != %v == %v (-) %v", + s1, s2, stmp, gen, pt2, ptmp, p1, p2) + } + st2 := g.Scalar().Neg(s2) + st2.Add(s1, st2) + if !stmp.Equal(st2) { + t.Fatalf("Scalar.Neg doesn't work: -%v == %v, %[2]v + %v == %v != %v", + s2, g.Scalar().Neg(s2), s1, st2, stmp) + } + pt2.Neg(p2).Add(pt2, p1) + if !pt2.Equal(ptmp) { + t.Fatalf("Point.Neg doesn't work: (-)%v == %v, %[2]v (+) %v == %v != %v", + p2, g.Point().Neg(p2), p1, pt2, ptmp) + } + + // Multiplicative homomorphic identities + stmp.Mul(s1, s2) + if !ptmp.Mul(stmp, gen).Equal(dh1) { + t.Fatalf("Multiplicative homomorphism doesn't work: %v * %v == %v, %[2]v (x) %v == %v != %v", + s1, s2, stmp, gen, ptmp, dh1) + } + if primeOrder { + st2.Inv(s2) + st2.Mul(st2, stmp) + if !st2.Equal(s1) { + t.Fatalf("Scalar division doesn't work: %v^-1 * %v == %v * %[2]v == %[4]v != %v", + s2, stmp, g.Scalar().Inv(s2), st2, s1) + } + st2.Div(stmp, s2) + if !st2.Equal(s1) { + t.Fatalf("Scalar division doesn't work: %v / %v == %v != %v", + stmp, s2, 
st2, s1) + } + } + + pick := func(rand cipher.Stream) (p kyber.Point) { + defer func() { + /*if err := recover(); err != nil {*/ + //// TODO implement Pick for GT + //p = g.Point().Mul(g.Scalar().Pick(rand), nil) + //return + /*}*/ + }() + p = g.Point().Pick(rand) + return + } + + // Test randomly picked points + last := gen + for i := 0; i < 5; i++ { + // TODO fork kyber and make that an interface + rgen := pick(rand) + if rgen.Equal(last) { + t.Fatalf("Pick() not producing unique points: got %v twice", rgen) + } + last = rgen + + ptmp.Mul(stmp.SetInt64(-1), rgen).Add(ptmp, rgen) + if !ptmp.Equal(pzero) { + t.Fatalf("random generator fails additive identity: %v (x) %v == %v, %v (+) %[3]v == %[5]v != %v", + g.Scalar().SetInt64(-1), rgen, g.Point().Mul(g.Scalar().SetInt64(-1), rgen), + rgen, g.Point().Mul(g.Scalar().SetInt64(-1), rgen), pzero) + } + if primeOrder { + ptmp.Mul(stmp.SetInt64(2), rgen).Mul(stmp.Inv(stmp), ptmp) + if !ptmp.Equal(rgen) { + t.Fatalf("random generator fails multiplicative identity: %v (x) (2 (x) %v) == %v != %[2]v", + stmp, rgen, ptmp) + } + } + points = append(points, rgen) + } + + // Test encoding and decoding + buf := new(bytes.Buffer) + for i := 0; i < 5; i++ { + buf.Reset() + s := g.Scalar().Pick(rand) + if _, err := s.MarshalTo(buf); err != nil { + t.Fatalf("encoding of secret fails: " + err.Error()) + } + if _, err := stmp.UnmarshalFrom(buf); err != nil { + t.Fatalf("decoding of secret fails: " + err.Error()) + } + if !stmp.Equal(s) { + t.Fatalf("decoding produces different secret than encoded") + } + + buf.Reset() + p := pick(rand) + if _, err := p.MarshalTo(buf); err != nil { + t.Fatalf("encoding of point fails: " + err.Error()) + } + if _, err := ptmp.UnmarshalFrom(buf); err != nil { + t.Fatalf("decoding of point fails: " + err.Error()) + } + + if !ptmp.Equal(p) { + t.Fatalf("decoding produces different point than encoded") + } + } + + // Test that we can marshal/ unmarshal null point + pzero = g.Point().Null() + b, _ := pzero.MarshalBinary() + repzero := g.Point() + err := repzero.UnmarshalBinary(b) + if err != nil { + t.Fatalf("Could not unmarshall binary %v: %v", b, err) + } + + return points +} + +// GroupTest applies a generic set of validation tests to a cryptographic Group. 
+func GroupTest(t *testing.T, g kyber.Group) { + testGroup(t, g, random.New()) +} + +func TestKyberG1(t *testing.T) { + GroupTest(t, NewGroupG1()) +} + +func TestKyberG2(t *testing.T) { + GroupTest(t, NewGroupG2()) +} + +func TestKyberPairingG2(t *testing.T) { + s := NewBLS12381Suite().(*Suite) + a := s.G1().Scalar().Pick(s.RandomStream()) + b := s.G2().Scalar().Pick(s.RandomStream()) + aG := s.G1().Point().Mul(a, nil) + bH := s.G2().Point().Mul(b, nil) + ab := s.G1().Scalar().Mul(a, b) + abG := s.G1().Point().Mul(ab, nil) + // e(aG, bG) = e(G,H)^(ab) + p1 := s.Pair(aG, bH) + // e((ab)G,H) = e(G,H)^(ab) + p2 := s.Pair(abG, s.G2().Point().Base()) + require.True(t, p1.Equal(p2)) + require.True(t, s.ValidatePairing(aG, bH, abG.Clone(), s.G2().Point().Base())) + + pRandom := s.Pair(aG, s.G2().Point().Pick(s.RandomStream())) + require.False(t, p1.Equal(pRandom)) + pRandom = s.Pair(s.G1().Point().Pick(s.RandomStream()), bH) + require.False(t, p1.Equal(pRandom)) +} + +func TestRacePairings(t *testing.T) { + s := NewBLS12381Suite().(*Suite) + a := s.G1().Scalar().Pick(s.RandomStream()) + aG := s.G1().Point().Mul(a, nil) + B := s.G2().Point().Pick(s.RandomStream()) + aB := s.G2().Point().Mul(a, B.Clone()) + wg := sync.WaitGroup{} + for i := 0; i < 10; i++ { + wg.Add(1) + go func() { + // e(p1,p2) =?= e(inv1^-1, inv2^-1) + s.ValidatePairing(aG, B, s.G1().Point(), aB) + wg.Done() + }() + } + wg.Wait() +} + +func BenchmarkPairingSeparate(bb *testing.B) { + s := NewBLS12381Suite().(*Suite) + a := s.G1().Scalar().Pick(s.RandomStream()) + b := s.G2().Scalar().Pick(s.RandomStream()) + aG := s.G1().Point().Mul(a, nil) + bH := s.G2().Point().Mul(b, nil) + ab := s.G1().Scalar().Mul(a, b) + abG := s.G1().Point().Mul(ab, nil) + bb.ResetTimer() + for i := 0; i < bb.N; i++ { + + // e(aG, bG) = e(G,H)^(ab) + p1 := s.Pair(aG, bH) + // e((ab)G,H) = e(G,H)^(ab) + p2 := s.Pair(abG, s.G2().Point().Base()) + if !p1.Equal(p2) { + panic("aie") + } + } +} + +func BenchmarkPairingInv(bb *testing.B) { + s := NewBLS12381Suite().(*Suite) + a := s.G1().Scalar().Pick(s.RandomStream()) + b := s.G2().Scalar().Pick(s.RandomStream()) + aG := s.G1().Point().Mul(a, nil) + bH := s.G2().Point().Mul(b, nil) + ab := s.G1().Scalar().Mul(a, b) + abG := s.G1().Point().Mul(ab, nil) + bb.ResetTimer() + for i := 0; i < bb.N; i++ { + if !s.ValidatePairing(aG, bH, abG.Clone(), s.G2().Point().Base()) { + panic("aie") + } + } +} + +func TestKyberBLSG2(t *testing.T) { + suite := NewBLS12381Suite() + scheme := bls.NewSchemeOnG2(suite) + test.SchemeTesting(t, scheme) +} + +func TestKyberBLSG1(t *testing.T) { + suite := NewBLS12381Suite() + scheme := bls.NewSchemeOnG2(suite) + test.SchemeTesting(t, scheme) +} + +func TestKyberThresholdG2(t *testing.T) { + suite := NewBLS12381Suite() + tscheme := tbls.NewThresholdSchemeOnG2(suite) + test.ThresholdTest(t, suite.G1(), tscheme) +} + +func TestKyberThresholdG1(t *testing.T) { + suite := NewBLS12381Suite() + tscheme := tbls.NewThresholdSchemeOnG2(suite) + test.ThresholdTest(t, suite.G1(), tscheme) +} + +func TestIsValidGroup(t *testing.T) { + suite := NewBLS12381Suite() + p1 := suite.G1().Point().Pick(random.New()) + p2 := suite.G1().Point().Pick(random.New()) + + require.True(t, p1.(GroupChecker).IsInCorrectGroup()) + require.True(t, p2.(GroupChecker).IsInCorrectGroup()) +} + +var suite = NewBLS12381Suite() + +func NewElement() kyber.Scalar { + return suite.G1().Scalar() +} +func NewG1() kyber.Point { + return suite.G1().Point().Base() +} +func NewG2() kyber.Point { + return suite.G2().Point().Base() +} 
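// A minimal sketch (hypothetical test, not part of this patch) of the property the
// helpers around here exist for: bilinearity, e(a*G1, b*G2) == e(G1, G2)^(a*b).
// It reuses NewElement, NewG1, NewG2 and the Pair helper defined just below; in
// kyber's additive notation, Mul of a GT element by a scalar is an exponentiation,
// the same pattern TestBasicPairing uses further down.
func TestBilinearitySketch(t *testing.T) {
	a := NewElement().Pick(random.New())
	b := NewElement().Pick(random.New())
	// left = e(a*G1, b*G2)
	left := Pair(NewG1().Mul(a, nil), NewG2().Mul(b, nil))
	// right = e(G1, G2)^(a*b)
	ab := suite.G1().Scalar().Mul(a, b)
	right := Pair(NewG1(), NewG2())
	right = right.Mul(ab, right)
	require.True(t, left.Equal(right))
}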
+func Pair(a, b kyber.Point) kyber.Point { + return suite.Pair(a, b) +} +func TestBasicPairing(t *testing.T) { + // we test a * b = c + d + a := NewElement().Pick(random.New()) + b := NewElement().Pick(random.New()) + c := NewElement().Pick(random.New()) + d := NewElement().Sub(NewElement().Mul(a, b), c) + + // check in the clear + ab := NewElement().Mul(a, b) + cd := NewElement().Add(c, d) + require.True(t, ab.Equal(cd)) + + // check in the exponent now with the following + // e(aG1,bG2) = e(cG1,G2) * e(G1,dG2) <=> + // e(G1,G2)^(a*b) = e(G1,G2)^c * e(G1,G2)^d + // e(G1,G2)^(a*b) = e(G1,G2)^(c + d) + aG := NewG1().Mul(a, nil) + bG := NewG2().Mul(b, nil) + left := Pair(aG, bG) + + cG := NewG1().Mul(c, nil) + right1 := Pair(cG, NewG2()) + dG := NewG2().Mul(d, nil) + right2 := Pair(NewG1(), dG) + right := suite.GT().Point().Add(right1, right2) + require.True(t, left.Equal(right)) + + // Test if addition works in GT + mright := right.Clone().Neg(right) + res := mright.Add(mright, right) + require.True(t, res.Equal(suite.GT().Point().Null())) + + // Test if Sub works in GT + expZero := right.Clone().Sub(right, right) + require.True(t, expZero.Equal(suite.GT().Point().Null())) + + // Test if scalar mul works in GT + // e(aG,G) == e(G,G)^a + left = Pair(aG, suite.G2().Point().Base()) + right = Pair(suite.G1().Point().Base(), suite.G2().Point().Base()) + right = right.Mul(a, right) + require.True(t, left.Equal(right)) +} + +func TestVerifySigOnG1WithG2Domain(t *testing.T) { + pk := "a0b862a7527fee3a731bcb59280ab6abd62d5c0b6ea03dc4ddf6612fdfc9d01f01c31542541771903475eb1ec6615f8d0df0b8b6dce385811d6dcf8cbefb8759e5e616a3dfd054c928940766d9a5b9db91e3b697e5d70a975181e007f87fca5e" + sig := "9544ddce2fdbe8688d6f5b4f98eed5d63eee3902e7e162050ac0f45905a55657714880adabe3c3096b92767d886567d0" + round := uint64(1) + + suite := NewBLS12381Suite() + + pkb, _ := hex.DecodeString(pk) + pubkeyP := suite.G2().Point() + pubkeyP.UnmarshalBinary(pkb) + sigb, _ := hex.DecodeString(sig) + sigP := suite.G1().Point() + sigP.UnmarshalBinary(sigb) + h := sha256.New() + _ = binary.Write(h, binary.BigEndian, round) + msg := h.Sum(nil) + + base := suite.G2().Point().Base().Clone() + MsgP := suite.G1().Point().(kyber.HashablePoint).Hash(msg) + if suite.ValidatePairing(MsgP, pubkeyP, sigP, base) { + t.Fatalf("Should have failed to validate because of invalid domain") + } + + // setting the wrong domain for G1 hashing + suite.(*Suite).SetDomainG1(DefaultDomainG2()) + MsgP = suite.G1().Point().(kyber.HashablePoint).Hash(msg) + if !suite.ValidatePairing(MsgP, pubkeyP, sigP, base) { + t.Fatalf("Error validating pairing") + } +} + +func TestVerifySigOnG2(t *testing.T) { + pk := "868f005eb8e6e4ca0a47c8a77ceaa5309a47978a7c71bc5cce96366b5d7a569937c529eeda66c7293784a9402801af31" + sig := "8d61d9100567de44682506aea1a7a6fa6e5491cd27a0a0ed349ef6910ac5ac20ff7bc3e09d7c046566c9f7f3c6f3b10104990e7cb424998203d8f7de586fb7fa5f60045417a432684f85093b06ca91c769f0e7ca19268375e659c2a2352b4655" + prevSig := "176f93498eac9ca337150b46d21dd58673ea4e3581185f869672e59fa4cb390a" + round := uint64(1) + + suite := NewBLS12381Suite() + pkb, _ := hex.DecodeString(pk) + pubkeyP := suite.G1().Point() + pubkeyP.UnmarshalBinary(pkb) + sigb, _ := hex.DecodeString(sig) + sigP := suite.G2().Point() + sigP.UnmarshalBinary(sigb) + prev, _ := hex.DecodeString(prevSig) + h := sha256.New() + h.Write(prev) + _ = binary.Write(h, binary.BigEndian, round) + msg := h.Sum(nil) + + base := suite.G1().Point().Base().Clone() + MsgP := suite.G2().Point().(kyber.HashablePoint).Hash(msg) 
+ if !suite.ValidatePairing(base, sigP, pubkeyP, MsgP) { + t.Fatalf("Error validating pairing") + } +} + +func TestImplementInterfaces(t *testing.T) { + var _ kyber.Point = &G1Elt{} + var _ kyber.Point = &G2Elt{} + var _ kyber.Point = >Elt{} + var _ kyber.HashablePoint = &G1Elt{} + var _ kyber.HashablePoint = &G2Elt{} + // var _ kyber.hashablePoint = &KyberGT{} // GT is not hashable for now + var _ kyber.Group = &groupBls{} + var _ pairing.Suite = &Suite{} +} + +func TestSuiteWithDST(t *testing.T) { + pk := "a0b862a7527fee3a731bcb59280ab6abd62d5c0b6ea03dc4ddf6612fdfc9d01f01c31542541771903475eb1ec6615f8d0df0b8b6dce385811d6dcf8cbefb8759e5e616a3dfd054c928940766d9a5b9db91e3b697e5d70a975181e007f87fca5e" + sig := "9544ddce2fdbe8688d6f5b4f98eed5d63eee3902e7e162050ac0f45905a55657714880adabe3c3096b92767d886567d0" + round := uint64(1) + // using DomainG2 for G1 + suite := NewBLS12381SuiteWithDST(DefaultDomainG2(), nil) + + pkb, _ := hex.DecodeString(pk) + pubkeyP := suite.G2().Point() + pubkeyP.UnmarshalBinary(pkb) + sigb, _ := hex.DecodeString(sig) + sigP := suite.G1().Point() + sigP.UnmarshalBinary(sigb) + h := sha256.New() + _ = binary.Write(h, binary.BigEndian, round) + msg := h.Sum(nil) + + base := suite.G2().Point().Base().Clone() + MsgP := suite.G1().Point().(kyber.HashablePoint).Hash(msg) + if !suite.ValidatePairing(MsgP, pubkeyP, sigP, base) { + t.Fatalf("Error validating pairing") + } +} + +func TestExplicitDefaultDST(t *testing.T) { + g1d1 := NullG1([]byte("BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_NUL_")...) + g2d1 := NullG2([]byte("BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_NUL_")...) + g1d2 := NullG1([]byte("BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_NUL_")...) + g2d2 := NullG2([]byte("BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_NUL_")...) + + if g1d1.dst != nil { + t.Fatal("Default G1 DST should be represented internally as nil. Got:", string(g1d1.dst)) + } + if g2d2.dst != nil { + t.Fatal("Default G2 DST should be represented internally as nil. Got:", string(g2d2.dst)) + } + if !bytes.Equal(g1d2.dst, domainG2) { + t.Fatal("Non-default G1 DST should not be nil. Got:", string(g1d2.dst)) + } + if !bytes.Equal(g2d1.dst, domainG1) { + t.Fatal("Non-default G2 DST should not be nil. Got:", string(g2d1.dst)) + } + + suite := NewBLS12381SuiteWithDST(DefaultDomainG2(), DefaultDomainG2()) + sg1 := suite.G1().Point() + sg2 := suite.G2().Point() + if p, ok := sg1.(*G1Elt); !ok || !bytes.Equal(p.dst, domainG2) { + t.Fatal("Non-default G1 DST should not be nil. Got:", string(p.dst)) + } + if p, ok := sg2.(*G2Elt); !ok || p.dst != nil { + t.Fatal("Default G2 DST should be represented internally as nil. 
Got:", string(p.dst)) + } +} diff --git a/pairing/bls12381/point.go b/pairing/bls12381/point.go deleted file mode 100644 index dba60ae04..000000000 --- a/pairing/bls12381/point.go +++ /dev/null @@ -1,403 +0,0 @@ -package bls12381 - -import ( - "crypto/cipher" - "crypto/sha256" - "encoding/hex" - "io" - - bls12381 "github.com/kilic/bls12-381" - "go.dedis.ch/kyber/v3" - "go.dedis.ch/kyber/v3/group/mod" -) - -// pointG1 is a kyber.Point holding a G1 point on BLS12-381 curve -type pointG1 struct { - p *bls12381.PointG1 -} - -func nullpointG1() *pointG1 { - var p bls12381.PointG1 - return newPointG1(&p) -} - -func newPointG1(p *bls12381.PointG1) *pointG1 { - return &pointG1{p: p} -} - -func (k *pointG1) Equal(k2 kyber.Point) bool { - return bls12381.NewG1().Equal(k.p, k2.(*pointG1).p) -} - -func (k *pointG1) Null() kyber.Point { - return newPointG1(bls12381.NewG1().Zero()) -} - -func (k *pointG1) Base() kyber.Point { - return newPointG1(bls12381.NewG1().One()) -} - -func (k *pointG1) Pick(rand cipher.Stream) kyber.Point { - var dst, src [32]byte - rand.XORKeyStream(dst[:], src[:]) - return k.Hash(dst[:]) -} - -func (k *pointG1) Set(q kyber.Point) kyber.Point { - k.p.Set(q.(*pointG1).p) - return k -} - -func (k *pointG1) Clone() kyber.Point { - var p bls12381.PointG1 - p.Set(k.p) - return newPointG1(&p) -} - -func (k *pointG1) EmbedLen() int { - panic("bls12-381: unsupported operation") -} - -func (k *pointG1) Embed(data []byte, rand cipher.Stream) kyber.Point { - panic("bls12-381: unsupported operation") -} - -func (k *pointG1) Data() ([]byte, error) { - panic("bls12-381: unsupported operation") -} - -func (k *pointG1) Add(a, b kyber.Point) kyber.Point { - aa := a.(*pointG1) - bb := b.(*pointG1) - bls12381.NewG1().Add(k.p, aa.p, bb.p) - return k -} - -func (k *pointG1) Sub(a, b kyber.Point) kyber.Point { - aa := a.(*pointG1) - bb := b.(*pointG1) - bls12381.NewG1().Sub(k.p, aa.p, bb.p) - return k -} - -func (k *pointG1) Neg(a kyber.Point) kyber.Point { - aa := a.(*pointG1) - bls12381.NewG1().Neg(k.p, aa.p) - return k -} - -func (k *pointG1) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { - if q == nil { - q = nullpointG1().Base() - } - bls12381.NewG1().MulScalarBig( - k.p, - q.(*pointG1).p, - &s.(*mod.Int).V, - ) - return k -} - -func (k *pointG1) MarshalBinary() ([]byte, error) { - return bls12381.NewG1().ToCompressed(k.p), nil -} - -func (k *pointG1) UnmarshalBinary(buff []byte) error { - var err error - k.p, err = bls12381.NewG1().FromCompressed(buff) - return err -} - -func (k *pointG1) MarshalTo(w io.Writer) (int, error) { - buf, err := k.MarshalBinary() - if err != nil { - return 0, err - } - return w.Write(buf) -} - -func (k *pointG1) UnmarshalFrom(r io.Reader) (int, error) { - buf := make([]byte, k.MarshalSize()) - n, err := io.ReadFull(r, buf) - if err != nil { - return n, err - } - return n, k.UnmarshalBinary(buf) -} - -func (k *pointG1) MarshalSize() int { - return 48 -} - -func (k *pointG1) String() string { - b, _ := k.MarshalBinary() - return "bls12-381.G1: " + hex.EncodeToString(b) -} - -func (k *pointG1) Hash(m []byte) kyber.Point { - p, _ := bls12381.NewG1().HashToCurve(m, Domain) - k.p = p - return k - -} - -func (k *pointG1) IsInCorrectGroup() bool { - return bls12381.NewG1().InCorrectSubgroup(k.p) -} - -// Domain comes from the ciphersuite used by the RFC of this name compatible -// with the paired library > v18 -var Domain = []byte("BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_NUL_") - -// pointG2 is a kyber.Point holding a G2 point on BLS12-381 curve -type pointG2 struct { - p 
*bls12381.PointG2 -} - -func nullpointG2() *pointG2 { - var p bls12381.PointG2 - return newPointG2(&p) -} - -func newPointG2(p *bls12381.PointG2) *pointG2 { - return &pointG2{p: p} -} - -func (k *pointG2) Equal(k2 kyber.Point) bool { - return bls12381.NewG2().Equal(k.p, k2.(*pointG2).p) -} - -func (k *pointG2) Null() kyber.Point { - return newPointG2(bls12381.NewG2().Zero()) -} - -func (k *pointG2) Base() kyber.Point { - return newPointG2(bls12381.NewG2().One()) -} - -func (k *pointG2) Pick(rand cipher.Stream) kyber.Point { - var dst, src [32]byte - rand.XORKeyStream(dst[:], src[:]) - return k.Hash(dst[:]) -} - -func (k *pointG2) Set(q kyber.Point) kyber.Point { - k.p.Set(q.(*pointG2).p) - return k -} - -func (k *pointG2) Clone() kyber.Point { - var p bls12381.PointG2 - p.Set(k.p) - return newPointG2(&p) -} - -func (k *pointG2) EmbedLen() int { - panic("bls12-381: unsupported operation") -} - -func (k *pointG2) Embed(data []byte, rand cipher.Stream) kyber.Point { - panic("bls12-381: unsupported operation") -} - -func (k *pointG2) Data() ([]byte, error) { - panic("bls12-381: unsupported operation") -} - -func (k *pointG2) Add(a, b kyber.Point) kyber.Point { - aa := a.(*pointG2) - bb := b.(*pointG2) - bls12381.NewG2().Add(k.p, aa.p, bb.p) - return k -} - -func (k *pointG2) Sub(a, b kyber.Point) kyber.Point { - aa := a.(*pointG2) - bb := b.(*pointG2) - bls12381.NewG2().Sub(k.p, aa.p, bb.p) - return k -} - -func (k *pointG2) Neg(a kyber.Point) kyber.Point { - aa := a.(*pointG2) - bls12381.NewG2().Neg(k.p, aa.p) - return k -} - -func (k *pointG2) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { - if q == nil { - q = nullpointG2().Base() - } - bls12381.NewG2().MulScalarBig( - k.p, - q.(*pointG2).p, - &s.(*mod.Int).V, - ) - return k -} - -func (k *pointG2) MarshalBinary() ([]byte, error) { - return bls12381.NewG2().ToCompressed(k.p), nil -} - -func (k *pointG2) UnmarshalBinary(buff []byte) error { - var err error - k.p, err = bls12381.NewG2().FromCompressed(buff) - return err -} - -func (k *pointG2) MarshalTo(w io.Writer) (int, error) { - buf, err := k.MarshalBinary() - if err != nil { - return 0, err - } - return w.Write(buf) -} - -func (k *pointG2) UnmarshalFrom(r io.Reader) (int, error) { - buf := make([]byte, k.MarshalSize()) - n, err := io.ReadFull(r, buf) - if err != nil { - return n, err - } - return n, k.UnmarshalBinary(buf) -} - -func (k *pointG2) MarshalSize() int { - return 96 -} - -func (k *pointG2) String() string { - b, _ := k.MarshalBinary() - return "bls12-381.G1: " + hex.EncodeToString(b) -} - -func (k *pointG2) Hash(m []byte) kyber.Point { - pg2, _ := bls12381.NewG2().HashToCurve(m, Domain) - k.p = pg2 - return k -} - -func sha256Hash(in []byte) []byte { - h := sha256.New() - h.Write(in) - return h.Sum(nil) -} - -func (k *pointG2) IsInCorrectGroup() bool { - return bls12381.NewG2().InCorrectSubgroup(k.p) -} - -type pointGT struct { - f *bls12381.E -} - -func newEmptyGT() *pointGT { - return newPointGT(bls12381.NewGT().New()) -} -func newPointGT(f *bls12381.E) *pointGT { - return &pointGT{ - f: f, - } -} - -func (k *pointGT) Equal(kk kyber.Point) bool { - return k.f.Equal(kk.(*pointGT).f) -} - -const gtLength = 576 - -func (k *pointGT) Null() kyber.Point { - var zero [gtLength]byte - k.f, _ = bls12381.NewGT().FromBytes(zero[:]) - return k -} - -func (k *pointGT) Base() kyber.Point { - panic("not yet available") -} - -func (k *pointGT) Pick(rand cipher.Stream) kyber.Point { - panic("TODO: bls12-381.GT.Pick()") -} - -func (k *pointGT) Set(q kyber.Point) kyber.Point { - 
k.f.Set(q.(*pointGT).f) - return k -} - -func (k *pointGT) Clone() kyber.Point { - kk := newEmptyGT() - kk.Set(k) - return kk -} - -func (k *pointGT) Add(a, b kyber.Point) kyber.Point { - aa := a.(*pointGT) - bb := b.(*pointGT) - bls12381.NewGT().Mul(k.f, aa.f, bb.f) - return k -} - -func (k *pointGT) Sub(a, b kyber.Point) kyber.Point { - aa := a.(*pointGT) - bb := b.(*pointGT) - bls12381.NewGT().Sub(k.f, aa.f, bb.f) - return k -} - -func (k *pointGT) Neg(q kyber.Point) kyber.Point { - panic("bls12-381: GT is not a full kyber.Point implementation") -} - -func (k *pointGT) Mul(s kyber.Scalar, q kyber.Point) kyber.Point { - panic("bls12-381: GT is not a full kyber.Point implementation") -} - -func (k *pointGT) MarshalBinary() ([]byte, error) { - return bls12381.NewGT().ToBytes(k.f), nil -} - -func (k *pointGT) MarshalTo(w io.Writer) (int, error) { - buf, err := k.MarshalBinary() - if err != nil { - return 0, err - } - return w.Write(buf) -} - -func (k *pointGT) UnmarshalBinary(buf []byte) error { - fe12, err := bls12381.NewGT().FromBytes(buf) - k.f = fe12 - return err -} - -func (k *pointGT) UnmarshalFrom(r io.Reader) (int, error) { - buf := make([]byte, k.MarshalSize()) - n, err := io.ReadFull(r, buf) - if err != nil { - return n, err - } - return n, k.UnmarshalBinary(buf) -} - -func (k *pointGT) MarshalSize() int { - return 576 -} - -func (k *pointGT) String() string { - b, _ := k.MarshalBinary() - return "bls12-381.GT: " + hex.EncodeToString(b) -} - -func (k *pointGT) EmbedLen() int { - panic("bls12-381.GT.EmbedLen(): unsupported operation") -} - -func (k *pointGT) Embed(data []byte, rand cipher.Stream) kyber.Point { - panic("bls12-381.GT.Embed(): unsupported operation") -} - -func (k *pointGT) Data() ([]byte, error) { - panic("bls12-381.GT.Data(): unsupported operation") -} diff --git a/pairing/bls12381/suite.go b/pairing/bls12381/suite.go deleted file mode 100644 index 86efbd702..000000000 --- a/pairing/bls12381/suite.go +++ /dev/null @@ -1,175 +0,0 @@ -package bls12381 - -import ( - "crypto/cipher" - "crypto/sha256" - "hash" - "io" - "reflect" - - bls12381 "github.com/kilic/bls12-381" - "go.dedis.ch/fixbuf" - "go.dedis.ch/kyber/v3" - "go.dedis.ch/kyber/v3/util/random" - "go.dedis.ch/kyber/v3/xof/blake2xb" -) - -// Suite implements the pairing.Suite interface for the Bls12381 bilinear pairing. -type Suite struct { - *commonSuite - g1 *groupG1 - g2 *groupG2 - gt *groupGT -} - -// NewSuite generates and returns a new Bls12381 pairing suite. -func NewSuite() *Suite { - s := &Suite{commonSuite: &commonSuite{}} - s.g1 = &groupG1{commonSuite: s.commonSuite, common: common{isPrime: true}} - s.g2 = &groupG2{commonSuite: s.commonSuite, common: common{isPrime: false}} - s.gt = &groupGT{commonSuite: s.commonSuite, common: common{isPrime: false}} - return s -} - -// NewSuiteG1 returns a G1 suite. -func NewSuiteG1() *Suite { - s := NewSuite() - s.commonSuite.Group = &groupG1{commonSuite: &commonSuite{}, common: common{isPrime: true}} - return s -} - -// NewSuiteG2 returns a G2 suite. -func NewSuiteG2() *Suite { - s := NewSuite() - s.commonSuite.Group = &groupG2{commonSuite: &commonSuite{}, common: common{isPrime: true}} - return s -} - -// NewSuiteGT returns a GT suite. -func NewSuiteGT() *Suite { - s := NewSuite() - s.commonSuite.Group = &groupGT{commonSuite: &commonSuite{}, common: common{isPrime: true}} - return s -} - -// NewSuiteRand generates and returns a new BN256 suite seeded by the -// given cipher stream. 
-func NewSuiteRand(rand cipher.Stream) *Suite { - s := &Suite{commonSuite: &commonSuite{s: rand}} - s.g1 = &groupG1{commonSuite: s.commonSuite, common: common{isPrime: true}} - s.g2 = &groupG2{commonSuite: s.commonSuite, common: common{isPrime: false}} - s.gt = &groupGT{commonSuite: s.commonSuite, common: common{isPrime: false}} - return s -} - -// G1 returns the group G1 of the BN256 pairing. -func (s *Suite) G1() kyber.Group { - return s.g1 -} - -// G2 returns the group G2 of the BN256 pairing. -func (s *Suite) G2() kyber.Group { - return s.g2 -} - -// GT returns the group GT of the BN256 pairing. -func (s *Suite) GT() kyber.Group { - return s.gt -} - -// ValidatePairing is a simpler way to verify a pairing equation. -// e(p1,p2) =?= e(inv1^-1, inv2^-1) -func (s *Suite) ValidatePairing(p1, p2, p3, p4 kyber.Point) bool { - e := bls12381.NewEngine() - e.AddPair(p1.(*pointG1).p, p2.(*pointG2).p) - e.AddPairInv(p3.(*pointG1).p, p4.(*pointG2).p) - return e.Check() -} - -// Pair takes the points p1 and p2 in groups G1 and G2, respectively, as input -// and computes their pairing in GT. -func (s *Suite) Pair(p1, p2 kyber.Point) kyber.Point { - e := bls12381.NewEngine() - g1point := p1.(*pointG1).p - g2point := p2.(*pointG2).p - return newPointGT(e.AddPair(g1point, g2point).Result()) -} - -// Not used other than for reflect.TypeOf() -var aScalar kyber.Scalar -var aPoint kyber.Point -var aPointG1 pointG1 -var aPointG2 pointG2 -var aPointGT pointGT - -var tScalar = reflect.TypeOf(&aScalar).Elem() -var tPoint = reflect.TypeOf(&aPoint).Elem() -var tPointG1 = reflect.TypeOf(&aPointG1).Elem() -var tPointG2 = reflect.TypeOf(&aPointG2).Elem() -var tPointGT = reflect.TypeOf(&aPointGT).Elem() - -type commonSuite struct { - s cipher.Stream - // kyber.Group is only set if we have a combined Suite - kyber.Group -} - -// New implements the kyber.Encoding interface. -func (c *commonSuite) New(t reflect.Type) interface{} { - if c.Group == nil { - panic("cannot create Point from NewGroup - please use bls12381.NewGroupG1") - } - switch t { - case tScalar: - return c.Scalar() - case tPoint: - return c.Point() - case tPointG1: - g1 := groupG1{} - return g1.Point() - case tPointG2: - g2 := groupG2{} - return g2.Point() - case tPointGT: - gt := groupGT{} - return gt.Point() - } - return nil -} - -// Read is the default implementation of kyber.Encoding interface Read. -func (c *commonSuite) Read(r io.Reader, objs ...interface{}) error { - return fixbuf.Read(r, c, objs...) -} - -// Write is the default implementation of kyber.Encoding interface Write. -func (c *commonSuite) Write(w io.Writer, objs ...interface{}) error { - return fixbuf.Write(w, objs) -} - -// Hash returns a newly instantiated sha256 hash function. -func (c *commonSuite) Hash() hash.Hash { - return sha256.New() -} - -// XOF returns a newlly instantiated blake2xb XOF function. -func (c *commonSuite) XOF(seed []byte) kyber.XOF { - return blake2xb.New(seed) -} - -// RandomStream returns a cipher.Stream which corresponds to a key stream from -// crypto/rand. -func (c *commonSuite) RandomStream() cipher.Stream { - if c.s != nil { - return c.s - } - return random.New() -} - -// String returns a recognizable string that this is a combined suite. 
-func (c commonSuite) String() string { - if c.Group != nil { - return c.Group.String() - } - return "bls12381" -} diff --git a/pairing/adapter.go b/pairing/bn256/adapter.go similarity index 90% rename from pairing/adapter.go rename to pairing/bn256/adapter.go index ade840742..819797cc0 100644 --- a/pairing/adapter.go +++ b/pairing/bn256/adapter.go @@ -1,9 +1,6 @@ -package pairing +package bn256 -import ( - "go.dedis.ch/kyber/v3" - "go.dedis.ch/kyber/v3/pairing/bn256" -) +import "go.dedis.ch/kyber/v3" // SuiteBn256 is an adapter that implements the suites.Suite interface so that // bn256 can be used as a common suite to generate key pairs for instance but @@ -13,14 +10,14 @@ import ( // compatible with public keys only (group G2) where the signature must be // used as a point from the group G1. type SuiteBn256 struct { - Suite + *Suite kyber.Group } // NewSuiteBn256 makes a new BN256 suite func NewSuiteBn256() *SuiteBn256 { return &SuiteBn256{ - Suite: bn256.NewSuite(), + Suite: NewSuite(), } } diff --git a/pairing/adapter_test.go b/pairing/bn256/adapter_test.go similarity index 97% rename from pairing/adapter_test.go rename to pairing/bn256/adapter_test.go index 97bbbb728..43112fd42 100644 --- a/pairing/adapter_test.go +++ b/pairing/bn256/adapter_test.go @@ -1,4 +1,4 @@ -package pairing +package bn256 import ( "testing" diff --git a/pairing/bn256/bls_test.go b/pairing/bn256/bls_test.go new file mode 100644 index 000000000..66f16bfa3 --- /dev/null +++ b/pairing/bn256/bls_test.go @@ -0,0 +1,42 @@ +package bn256 + +import ( + "testing" + + "github.com/stretchr/testify/require" + "go.dedis.ch/kyber/v3/sign/bls" + "go.dedis.ch/kyber/v3/sign/test" + "go.dedis.ch/kyber/v3/util/random" +) + +func TestBLSSchemeBN256G1(t *testing.T) { + suite := NewSuite() + s := bls.NewSchemeOnG1(suite) + test.SchemeTesting(t, s) +} + +func TestBinaryMarshalAfterAggregation_issue400(t *testing.T) { + suite := NewSuite() + s := bls.NewSchemeOnG1(suite) + _, public1 := s.NewKeyPair(random.New()) + _, public2 := s.NewKeyPair(random.New()) + + workingKey := s.AggregatePublicKeys(public1, public2, public1) + + workingBits, err := workingKey.MarshalBinary() + require.Nil(t, err) + + workingPoint := suite.G2().Point() + err = workingPoint.UnmarshalBinary(workingBits) + require.Nil(t, err) + + // this was failing before the fix + aggregatedKey := s.AggregatePublicKeys(public1, public1, public2) + + bits, err := aggregatedKey.MarshalBinary() + require.Nil(t, err) + + point := suite.G2().Point() + err = point.UnmarshalBinary(bits) + require.Nil(t, err) +} diff --git a/pairing/bn256/gfp.go b/pairing/bn256/gfp.go index f17d44657..d6df830a7 100644 --- a/pairing/bn256/gfp.go +++ b/pairing/bn256/gfp.go @@ -2,6 +2,7 @@ package bn256 import ( "fmt" + "math/big" ) type gfP [4]uint64 @@ -18,6 +19,23 @@ func newGFp(x int64) (out *gfP) { return out } +func newGFpFromBigInt(bigInt *big.Int) *gfP { + leftPad32 := func(in []byte) []byte { + if len(in) > 32 { + panic("input cannot be more than 32 bytes") + } + + o := make([]byte, 32) + copy(o[32-len(in):], in) + return o + } + + out := new(gfP) + out.Unmarshal(leftPad32(bigInt.Bytes())) + montEncode(out, out) + return out +} + func (e *gfP) String() string { return fmt.Sprintf("%16.16x%16.16x%16.16x%16.16x", e[3], e[2], e[1], e[0]) } @@ -66,5 +84,15 @@ func (e *gfP) Unmarshal(in []byte) { } } +func (e *gfP) BigInt() *big.Int { + bigInt := new(big.Int) + decoded := new(gfP) + montDecode(decoded, e) + buf := make([]byte, 32) + decoded.Marshal(buf) + bigInt.SetBytes(buf) + return bigInt +} + 
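// A minimal sketch (hypothetical test, not part of this patch) of the invariant the
// two helpers above provide: newGFpFromBigInt left-pads to 32 bytes and Montgomery-
// encodes, while BigInt Montgomery-decodes and re-marshals, so any value below the
// field modulus should round-trip. It would have to live inside package bn256, where
// gfP and these helpers are visible; "math/big" and "testing" are the only imports needed.
func TestGFpBigIntRoundTripSketch(t *testing.T) {
	x := big.NewInt(123456789)
	g := newGFpFromBigInt(x) // left-pad, unmarshal, Montgomery-encode
	if g.BigInt().Cmp(x) != 0 {
		t.Fatal("gfP <-> big.Int round-trip failed")
	}
}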
func montEncode(c, a *gfP) { gfpMul(c, a, r2) } func montDecode(c, a *gfP) { gfpMul(c, a, &gfP{1}) } diff --git a/pairing/bn256/gfp_decl.go b/pairing/bn256/gfp_decl.go index be1b80906..652089de2 100644 --- a/pairing/bn256/gfp_decl.go +++ b/pairing/bn256/gfp_decl.go @@ -1,3 +1,4 @@ +//go:build (amd64 && !generic) || (arm64 && !generic) // +build amd64,!generic arm64,!generic package bn256 diff --git a/pairing/bn256/gfp_generic.go b/pairing/bn256/gfp_generic.go index 8e6be9596..7742dda4c 100644 --- a/pairing/bn256/gfp_generic.go +++ b/pairing/bn256/gfp_generic.go @@ -1,3 +1,4 @@ +//go:build (!amd64 && !arm64) || generic // +build !amd64,!arm64 generic package bn256 diff --git a/pairing/bn256/point.go b/pairing/bn256/point.go index c3324e208..45210d804 100644 --- a/pairing/bn256/point.go +++ b/pairing/bn256/point.go @@ -62,21 +62,58 @@ func (p *pointG1) Clone() kyber.Point { } func (p *pointG1) EmbedLen() int { - panic("bn256.G1: unsupported operation") + // 2^255 is ~size of the curve P + // minus one byte for randomness + // minus one byte for len(data) + return (255 - 8 - 8) / 8 } func (p *pointG1) Embed(data []byte, rand cipher.Stream) kyber.Point { - // XXX: An approach to implement this is: - // - Encode data as the x-coordinate of a point on y²=x³+3 where len(data) - // is stored in the least significant byte of x and the rest is being - // filled with random values, i.e., x = rand || data || len(data). - // - Use the Tonelli-Shanks algorithm to compute the y-coordinate. - // - Convert the new point to Jacobian coordinates and set it as p. - panic("bn256.G1: unsupported operation") + // How many bytes to embed? + dl := p.EmbedLen() + if dl > len(data) { + dl = len(data) + } + + for { + // Pick a random point, with optional embedded data + var b [32]byte + rand.XORKeyStream(b[:], b[:]) + if data != nil { + b[0] = byte(dl) // Encode length in low 8 bits + copy(b[1:1+dl], data) // Copy in data to embed + } + x := new(big.Int).SetBytes(b[:]) + + y := deriveY(x) + if y != nil { + p.g.x = *newGFpFromBigInt(x) + p.g.y = *newGFpFromBigInt(y) + p.g.z = *newGFp(1) + if p.g.IsOnCurve() { + return p + } + } + } } func (p *pointG1) Data() ([]byte, error) { - panic("bn256.G1: unsupported operation") + var b [32]byte + + pgtemp := *p.g + pgtemp.MakeAffine() + if pgtemp.IsInfinity() { + return b[:], nil + } + tmp := &gfP{} + montDecode(tmp, &pgtemp.x) + tmp.Marshal(b[:]) + + dl := int(b[0]) // extract length byte + if dl > p.EmbedLen() { + return nil, errors.New("invalid embedded data length") + } + return b[1 : 1+dl], nil } func (p *pointG1) Add(a, b kyber.Point) kyber.Point { @@ -224,27 +261,12 @@ func (p *pointG1) Hash(m []byte) kyber.Point { // hashes a byte slice into two points on a curve represented by big.Int // ideally we want to do this using gfP, but gfP doesn't have a ModSqrt function func hashToPoint(m []byte) (*big.Int, *big.Int) { - // we need to convert curveB into a bigInt for our computation - intCurveB := new(big.Int) - { - decodedCurveB := new(gfP) - montDecode(decodedCurveB, curveB) - bufCurveB := make([]byte, 32) - decodedCurveB.Marshal(bufCurveB) - intCurveB.SetBytes(bufCurveB) - } - h := sha256.Sum256(m) x := new(big.Int).SetBytes(h[:]) x.Mod(x, p) for { - xxx := new(big.Int).Mul(x, x) - xxx.Mul(xxx, x) - xxx.Mod(xxx, p) - - t := new(big.Int).Add(xxx, intCurveB) - y := new(big.Int).ModSqrt(t, p) + y := deriveY(x) if y != nil { return x, y } @@ -253,6 +275,17 @@ func hashToPoint(m []byte) (*big.Int, *big.Int) { } } +func deriveY(x *big.Int) *big.Int { + intCurveB := 
curveB.BigInt() + xxx := new(big.Int).Mul(x, x) + xxx.Mul(xxx, x) + xxx.Mod(xxx, p) + + t := new(big.Int).Add(xxx, intCurveB) + y := new(big.Int).ModSqrt(t, p) + return y +} + type pointG2 struct { g *twistPoint } diff --git a/pairing/bn256/point_test.go b/pairing/bn256/point_test.go index 93450852c..abda3ac06 100644 --- a/pairing/bn256/point_test.go +++ b/pairing/bn256/point_test.go @@ -4,6 +4,8 @@ import ( "bytes" "encoding/hex" "testing" + + "go.dedis.ch/kyber/v3/util/random" ) func TestPointG1_HashToPoint(t *testing.T) { @@ -39,3 +41,17 @@ func TestPointG1_HashToPoint(t *testing.T) { t.Error("hash does not match reference") } } + +func TestPointG1_EmbedData(t *testing.T) { + m := []byte("The quick brown fox") + // Embed m onto prime group + M := newPointG1().Embed(m, random.New()) + + // Retrieve message encoded in x coordinate + mm, err := M.Data() + if err != nil { + t.Error(err) + } else if string(mm) != string(m) { + t.Error("G1: Embed/Data produced wrong output: ", string(mm), " expected ", string(m)) + } +} diff --git a/pairing/bn256/suite.go b/pairing/bn256/suite.go index 8f46bf721..048d981e9 100644 --- a/pairing/bn256/suite.go +++ b/pairing/bn256/suite.go @@ -98,6 +98,10 @@ func (s *Suite) Pair(p1 kyber.Point, p2 kyber.Point) kyber.Point { return s.GT().Point().(*pointGT).Pair(p1, p2) } +func (s *Suite) ValidatePairing(p1, p2, inv1, inv2 kyber.Point) bool { + return s.Pair(p1, p2).Equal(s.Pair(inv1, inv2)) +} + // Not used other than for reflect.TypeOf() var aScalar kyber.Scalar var aPoint kyber.Point diff --git a/pairing/pairing.go b/pairing/pairing.go index d0e70707b..3c6f9af87 100644 --- a/pairing/pairing.go +++ b/pairing/pairing.go @@ -10,6 +10,9 @@ type Suite interface { G2() kyber.Group GT() kyber.Group Pair(p1, p2 kyber.Point) kyber.Point + // ValidatePairing is a simpler way to verify a pairing equation. + // e(p1,p2) =?= e(inv1^-1, inv2^-1) + ValidatePairing(p1, p2, inv1, inv2 kyber.Point) bool kyber.Encoding kyber.HashFactory kyber.XOFFactory diff --git a/share/dkg/pedersen/dkg.go b/share/dkg/pedersen/dkg.go index 65baf3c01..a381d4f77 100644 --- a/share/dkg/pedersen/dkg.go +++ b/share/dkg/pedersen/dkg.go @@ -258,9 +258,9 @@ func NewDistKeyGenerator(suite Suite, longterm kyber.Scalar, participants []kybe // and is ommitted from the returned map. To know which participant a deal // belongs to, loop over the keys as indices in the list of new participants: // -// for i,dd := range distDeals { -// sendTo(participants[i],dd) -// } +// for i,dd := range distDeals { +// sendTo(participants[i],dd) +// } // // If this method cannot process its own Deal, that indicates a // severe problem with the configuration or implementation and @@ -292,7 +292,10 @@ func (d *DistKeyGenerator) Deals() (map[int]*Deal, error) { return nil, err } - if i == int(d.nidx) && d.newPresent { + // if there is a resharing in progress, nodes that stay must send their + // deals to the old nodes, otherwise old nodes won't get responses from + // staying nodes and won't be certified. 
+ if i == int(d.nidx) && d.newPresent && !d.isResharing { if d.processed { continue } @@ -338,7 +341,10 @@ func (d *DistKeyGenerator) ProcessDeal(dd *Deal) (*Response, error) { return nil, err } - ver, _ := d.verifiers[dd.Index] + ver, ok := d.verifiers[dd.Index] + if !ok { + return nil, fmt.Errorf("missing verifiers") + } resp, err := ver.ProcessEncryptedDeal(dd.Deal) if err != nil { @@ -378,11 +384,14 @@ func (d *DistKeyGenerator) ProcessDeal(dd *Deal) (*Response, error) { } } - // if the dealer in the old list is also present in the new list, then set + // If the dealer in the old list is also present in the new list, then set // his response to approval since he won't issue his own response for his - // own deal + // own deal. + // In the case of resharing the dealer will issue his own response in order + // for the old comities to get responses and be certified, which is why we + // don't add it manually there. newIdx, found := findPub(d.c.NewNodes, pub) - if found { + if found && !d.isResharing { d.verifiers[dd.Index].UnsafeSetResponseDKG(uint32(newIdx), vss.StatusApproval) } @@ -807,7 +816,7 @@ func (d *DistKeyGenerator) initVerifiers(c *Config) error { return nil } -//Renew adds the new distributed key share g (with secret 0) to the distributed key share d. +// Renew adds the new distributed key share g (with secret 0) to the distributed key share d. func (d *DistKeyShare) Renew(suite Suite, g *DistKeyShare) (*DistKeyShare, error) { // Check G(0) = 0*G. if !g.Public().Equal(suite.Point().Base().Mul(suite.Scalar().Zero(), nil)) { diff --git a/share/dkg/pedersen/dkg_test.go b/share/dkg/pedersen/dkg_test.go index 148883993..a3572de67 100644 --- a/share/dkg/pedersen/dkg_test.go +++ b/share/dkg/pedersen/dkg_test.go @@ -202,6 +202,7 @@ func TestDKGProcessResponse(t *testing.T) { require.Nil(t, err) resp12, err := rec.ProcessDeal(deals2[idxRec]) + require.NoError(t, err) require.NotNil(t, resp) require.Equal(t, vss.StatusComplaint, resp12.Response.Status) require.Equal(t, deals2[idxRec].Index, uint32(dkg2.nidx)) @@ -797,7 +798,7 @@ func TestDKGResharingNewNodesThreshold(t *testing.T) { require.Equal(t, newDkgs[i].nidx, i) } - //alive := oldT - 1 + // alive := oldT - 1 alive := oldT oldSelected := make([]*DistKeyGenerator, 0, alive) selected := make(map[string]bool) @@ -897,35 +898,43 @@ func TestDKGResharingNewNodesThreshold(t *testing.T) { } -// Test resharing to a different set of nodes with one common +// Test resharing to a different set of nodes with two common. 
func TestDKGResharingNewNodes(t *testing.T) { oldPubs, oldPrivs, dkgs := generate(defaultN, vss.MinimumT(defaultN)) fullExchange(t, dkgs, true) shares := make([]*DistKeyShare, len(dkgs)) sshares := make([]*share.PriShare, len(dkgs)) + for i, dkg := range dkgs { share, err := dkg.DistKeyShare() require.NoError(t, err) shares[i] = share sshares[i] = shares[i].Share } + // start resharing to a different group + oldN := defaultN oldT := len(shares[0].Commits) newN := oldN + 1 newT := oldT + 1 newPrivs := make([]kyber.Scalar, newN) newPubs := make([]kyber.Point, newN) + + // new[0], new[1] = old[-1], old[-2] newPrivs[0] = oldPrivs[oldN-1] newPubs[0] = oldPubs[oldN-1] - for i := 1; i < newN; i++ { + newPrivs[1] = oldPrivs[oldN-2] + newPubs[1] = oldPubs[oldN-2] + + for i := 2; i < newN; i++ { newPrivs[i], newPubs[i] = genPair() } - // creating the old dkgs and new dkgs + // creating the old dkgs + oldDkgs := make([]*DistKeyGenerator, oldN) - newDkgs := make([]*DistKeyGenerator, newN) var err error for i := 0; i < oldN; i++ { c := &Config{ @@ -937,26 +946,37 @@ func TestDKGResharingNewNodes(t *testing.T) { Threshold: newT, OldThreshold: oldT, } + oldDkgs[i], err = NewDistKeyHandler(c) require.NoError(t, err) - if i == oldN-1 { + + // because the node's public key is already in newPubs + if i >= oldN-2 { require.True(t, oldDkgs[i].canReceive) require.True(t, oldDkgs[i].canIssue) require.True(t, oldDkgs[i].isResharing) require.True(t, oldDkgs[i].newPresent) require.Equal(t, oldDkgs[i].oidx, i) - require.Equal(t, 0, oldDkgs[i].nidx) + require.Equal(t, oldN-i-1, oldDkgs[i].nidx) continue } + require.False(t, oldDkgs[i].canReceive) require.True(t, oldDkgs[i].canIssue) require.True(t, oldDkgs[i].isResharing) require.False(t, oldDkgs[i].newPresent) + require.Equal(t, 0, oldDkgs[i].nidx) // default for nidx require.Equal(t, oldDkgs[i].oidx, i) } - // the first one is the last old one - newDkgs[0] = oldDkgs[oldN-1] - for i := 1; i < newN; i++ { + + // creating the new dkg + + newDkgs := make([]*DistKeyGenerator, newN) + + newDkgs[0] = oldDkgs[oldN-1] // the first one is the last old one + newDkgs[1] = oldDkgs[oldN-2] // the second one is the before-last old one + + for i := 2; i < newN; i++ { c := &Config{ Suite: suite, Longterm: newPrivs[i], @@ -966,27 +986,40 @@ func TestDKGResharingNewNodes(t *testing.T) { Threshold: newT, OldThreshold: oldT, } + newDkgs[i], err = NewDistKeyHandler(c) + require.NoError(t, err) require.True(t, newDkgs[i].canReceive) require.False(t, newDkgs[i].canIssue) require.True(t, newDkgs[i].isResharing) require.True(t, newDkgs[i].newPresent) require.Equal(t, newDkgs[i].nidx, i) + // each old dkg act as a verifier + require.Len(t, newDkgs[i].Verifiers(), oldN) } // full secret sharing exchange + // 1. broadcast deals - deals := make([]map[int]*Deal, 0, newN*newN) - for _, dkg := range oldDkgs { + deals := make([]map[int]*Deal, len(oldDkgs)) + + for i, dkg := range oldDkgs { localDeals, err := dkg.Deals() - require.Nil(t, err) - deals = append(deals, localDeals) + require.NoError(t, err) + + // each old DKG will sent a deal to each other dkg, including + // themselves. 
+ require.Len(t, localDeals, newN) + + deals[i] = localDeals + v, exists := dkg.verifiers[uint32(dkg.oidx)] - if dkg.canReceive && dkg.nidx == 0 { - // this node should save its own response for its own deal - lenResponses := len(v.Aggregator.Responses()) - require.Equal(t, 1, lenResponses) + if dkg.canReceive && dkg.nidx <= 1 { + // staying nodes don't save their responses locally because they + // will broadcast them for the old comities. + require.Len(t, v.Responses(), 0) + require.True(t, exists) } else { // no verifiers since these dkg are not in in the new list require.False(t, exists) @@ -995,11 +1028,12 @@ func TestDKGResharingNewNodes(t *testing.T) { // the index key indicates the dealer index for which the responses are for resps := make(map[int][]*Response) + for i, localDeals := range deals { - for j, d := range localDeals { - dkg := newDkgs[j] + for dest, d := range localDeals { + dkg := newDkgs[dest] resp, err := dkg.ProcessDeal(d) - require.Nil(t, err) + require.NoError(t, err) require.Equal(t, vss.StatusApproval, resp.Response.Status) resps[i] = append(resps[i], resp) } @@ -1008,37 +1042,27 @@ func TestDKGResharingNewNodes(t *testing.T) { // all new dkgs should have the same length of verifiers map for _, dkg := range newDkgs { // one deal per old participants - require.Equal(t, oldN, len(dkg.verifiers), "dkg nidx %d failing", dkg.nidx) + require.Len(t, dkg.verifiers, oldN, "dkg nidx %d failing", dkg.nidx) } // 2. Broadcast responses for _, dealResponses := range resps { for _, resp := range dealResponses { - for _, dkg := range oldDkgs { - // Ignore messages from ourselves - if resp.Response.Index == uint32(dkg.nidx) { - continue - } + // the two last ones will be processed while doing this step on the + // newDkgs, since they are in the new set. 
+ for _, dkg := range oldDkgs[:oldN-2] { j, err := dkg.ProcessResponse(resp) - //fmt.Printf("old dkg %d process responses from new dkg %d about deal %d\n", dkg.oidx, dkg.nidx, resp.Index) - if err != nil { - fmt.Printf("old dkg at (oidx %d, nidx %d) has received response from idx %d for dealer idx %d\n", dkg.oidx, dkg.nidx, resp.Response.Index, resp.Index) - } - require.Nil(t, err) + require.NoError(t, err, "old dkg at (oidx %d, nidx %d) has received response from idx %d for dealer idx %d\n", dkg.oidx, dkg.nidx, resp.Response.Index, resp.Index) require.Nil(t, j) } - for _, dkg := range newDkgs[1:] { + for _, dkg := range newDkgs { // Ignore messages from ourselves if resp.Response.Index == uint32(dkg.nidx) { continue } j, err := dkg.ProcessResponse(resp) - //fmt.Printf("new dkg %d process responses from new dkg %d about deal %d\n", dkg.nidx, dkg.nidx, resp.Index) - if err != nil { - fmt.Printf("new dkg at nidx %d has received response from idx %d for deal %d\n", dkg.nidx, resp.Response.Index, resp.Index) - } - require.Nil(t, err) + require.NoError(t, err, "new dkg at nidx %d has received response from idx %d for deal %d\n", dkg.nidx, resp.Response.Index, resp.Index) require.Nil(t, j) } @@ -1058,6 +1082,16 @@ func TestDKGResharingNewNodes(t *testing.T) { } } + // make sure the new dkg members can certify + for _, dkg := range newDkgs { + require.True(t, dkg.Certified(), "new dkg %d can't certify", dkg.nidx) + } + + // make sure the old dkg members can certify + for _, dkg := range oldDkgs { + require.True(t, dkg.Certified(), "old dkg %d can't certify", dkg.oidx) + } + newShares := make([]*DistKeyShare, newN) newSShares := make([]*share.PriShare, newN) for i := range newDkgs { @@ -1066,6 +1100,7 @@ func TestDKGResharingNewNodes(t *testing.T) { newShares[i] = dks newSShares[i] = newShares[i].Share } + // check shares reconstruct to the same secret oldSecret, err := share.RecoverSecret(suite, sshares, oldT, oldN) require.NoError(t, err) @@ -1096,12 +1131,8 @@ func TestDKGResharingPartialNewNodes(t *testing.T) { newPrivs := make([]kyber.Scalar, 0, newN) newPubs := make([]kyber.Point, 0, newN) - for _, priv := range oldPrivs[1:] { - newPrivs = append(newPrivs, priv) - } - for _, pub := range oldPubs[1:] { - newPubs = append(newPubs, pub) - } + newPrivs = append(newPrivs, oldPrivs[1:]...) + newPubs = append(newPubs, oldPubs[1:]...) // add two new nodes priv1, pub1 := genPair() priv2, pub2 := genPair() @@ -1138,6 +1169,7 @@ func TestDKGResharingPartialNewNodes(t *testing.T) { require.False(t, totalDkgs[i].newPresent) require.Equal(t, totalDkgs[i].oidx, i) } + // the first one is the last old one for i := oldN; i < total; i++ { newIdx := i - oldN + newOffset @@ -1172,10 +1204,11 @@ func TestDKGResharingPartialNewNodes(t *testing.T) { deals = append(deals, localDeals) v, exists := dkg.verifiers[uint32(dkg.oidx)] if dkg.canReceive && dkg.newPresent { - // this node should save its own response for its own deal + // staying nodes don't process their responses locally because they + // broadcast them for the old comities to receive the responses. 
lenResponses := len(v.Aggregator.Responses()) require.True(t, exists) - require.Equal(t, 1, lenResponses) + require.Equal(t, 0, lenResponses) } else { require.False(t, exists) } diff --git a/share/dkg/rabin/dkg.go b/share/dkg/rabin/dkg.go index 376d1e1a4..969044b1e 100644 --- a/share/dkg/rabin/dkg.go +++ b/share/dkg/rabin/dkg.go @@ -282,7 +282,6 @@ func (d *DistKeyGenerator) ProcessDeal(dd *Deal) (*Response, error) { return nil, err } - d.verifiers[dd.Index] = ver resp, err := ver.ProcessEncryptedDeal(dd.Deal) if err != nil { return nil, err @@ -290,8 +289,9 @@ func (d *DistKeyGenerator) ProcessDeal(dd *Deal) (*Response, error) { // Set StatusApproval for the verifier that represents the participant // that distibuted the Deal - d.verifiers[dd.Index].UnsafeSetResponseDKG(dd.Index, true) + ver.UnsafeSetResponseDKG(dd.Index, true) + d.verifiers[dd.Index] = ver return &Response{ Index: dd.Index, Response: resp, diff --git a/share/dkg/rabin/dkg_test.go b/share/dkg/rabin/dkg_test.go index a90e0a03a..5a5ecb976 100644 --- a/share/dkg/rabin/dkg_test.go +++ b/share/dkg/rabin/dkg_test.go @@ -89,20 +89,6 @@ func TestDKGProcessDeal(t *testing.T) { assert.Error(t, err) rec.participants = goodP - // good deal - resp, err = rec.ProcessDeal(deal) - assert.NotNil(t, resp) - assert.Equal(t, true, resp.Response.Approved) - assert.Nil(t, err) - _, ok := rec.verifiers[deal.Index] - require.True(t, ok) - assert.Equal(t, uint32(0), resp.Index) - - // duplicate - resp, err = rec.ProcessDeal(deal) - assert.Nil(t, resp) - assert.Error(t, err) - // wrong index goodIdx := deal.Index deal.Index = uint32(nbParticipants + 1) @@ -119,6 +105,20 @@ func TestDKGProcessDeal(t *testing.T) { assert.Error(t, err) deal.Deal.Signature = goodSig + // good deal + resp, err = rec.ProcessDeal(deal) + assert.NotNil(t, resp) + assert.Equal(t, true, resp.Response.Approved) + assert.Nil(t, err) + _, ok := rec.verifiers[deal.Index] + require.True(t, ok) + assert.Equal(t, uint32(0), resp.Index) + + // duplicate + resp, err = rec.ProcessDeal(deal) + assert.Nil(t, resp) + assert.Error(t, err) + } func TestDKGProcessResponse(t *testing.T) { @@ -643,7 +643,7 @@ func genPair() (kyber.Scalar, kyber.Point) { func randomBytes(n int) []byte { var buff = make([]byte, n) - _, _ = rand.Read(buff[:]) + _, _ = rand.Read(buff) return buff } func checkDks(dks1, dks2 *DistKeyShare) bool { diff --git a/share/poly.go b/share/poly.go index 5a0b01bfd..84854eba5 100644 --- a/share/poly.go +++ b/share/poly.go @@ -40,7 +40,7 @@ func (p *PriShare) Hash(s kyber.HashFactory) []byte { } func (p *PriShare) String() string { - return fmt.Sprintf("{%d:%p}", p.I, p.V) + return fmt.Sprintf("{%d:%s}", p.I, p.V) } // PriPoly represents a secret sharing polynomial. diff --git a/share/vss/pedersen/vss.go b/share/vss/pedersen/vss.go index 91c9ec637..611c8c756 100644 --- a/share/vss/pedersen/vss.go +++ b/share/vss/pedersen/vss.go @@ -113,10 +113,10 @@ type Justification struct { // NewDealer returns a Dealer capable of leading the secret sharing scheme. It // does not have to be trusted by other Verifiers. The security parameter t is -// the number of shares required to reconstruct the secret. It is HIGHLY -// RECOMMENDED to use a threshold higher or equal than what the method -// MinimumT() returns, otherwise it breaks the security assumptions of the whole -// scheme. It returns an error if the t is less than or equal to 2. +// the number of shares required to reconstruct the secret. MinimumT() provides +// a middle ground between robustness and secrecy. 
Increasing t will increase +// the secrecy at the cost of decreased robustness and vice versa. It +// returns an error if t is less than or equal to 2. func NewDealer(suite Suite, longterm, secret kyber.Scalar, verifiers []kyber.Point, t int) (*Dealer, error) { d := &Dealer{ suite: suite, @@ -311,6 +311,7 @@ type Verifier struct { // - its longterm secret key // - the longterm dealer public key // - the list of public key of verifiers. The list MUST include the public key of this Verifier also. +// // The security parameter t of the secret sharing scheme is automatically set to // a default safe value. If a different t value is required, it is possible to set // it with `verifier.SetT()`. @@ -377,7 +378,7 @@ func (v *Verifier) ProcessEncryptedDeal(e *EncryptedDeal) (*Response, error) { r.Status = StatusComplaint } - if err == errDealAlreadyProcessed { + if errors.Is(err, errDealAlreadyProcessed) { return nil, err } @@ -713,11 +714,11 @@ func (a *Aggregator) MissingResponses() []int { return absents } -// MinimumT returns the minimum safe T that is proven to be secure with this -// protocol. It expects n, the total number of participants. -// WARNING: Setting a lower T could make -// the whole protocol insecure. Setting a higher T only makes it harder to -// reconstruct the secret. +// MinimumT returns a safe value of T that balances secrecy and robustness. +// It expects n, the total number of participants. +// T should be adjusted to your threat model. Setting a lower T decreases the +// difficulty for an adversary to break secrecy. However, too large a T makes +// it possible for an adversary to prevent recovery (robustness). func MinimumT(n int) int { return (n + 1) / 2 } diff --git a/share/vss/pedersen/vss_test.go b/share/vss/pedersen/vss_test.go index e97efdd51..9876c293c 100644 --- a/share/vss/pedersen/vss_test.go +++ b/share/vss/pedersen/vss_test.go @@ -383,6 +383,7 @@ func TestVSSAggregatorVerifyResponse(t *testing.T) { // wrong index resp.Index = uint32(len(verifiersPub)) sig, err := schnorr.Sign(suite, v.longterm, resp.Hash(suite)) + assert.NoError(t, err) resp.Signature = sig assert.Error(t, aggr.verifyResponse(resp)) resp.Index = 0 diff --git a/share/vss/rabin/vss.go b/share/vss/rabin/vss.go index d9e8f06e9..9de4b3980 100644 --- a/share/vss/rabin/vss.go +++ b/share/vss/rabin/vss.go @@ -8,24 +8,26 @@ // verifier can check the validity of the received share. The protocol has the // following steps: // -// 1) The dealer send a Deal to every verifiers using `Deals()`. Each deal must -// be sent securely to one verifier whose public key is at the same index than -// the index of the Deal. +// 1. The dealer sends a Deal to every verifier using `Deals()`. Each deal must +// be sent securely to one verifier whose public key is at the same index as +// the index of the Deal. // -// 2) Each verifier processes the Deal with `ProcessDeal`. -// This function returns a Response which can be twofold: -// - an approval, to confirm a correct deal -// - a complaint to announce an incorrect deal notifying others that the +// 2. Each verifier processes the Deal with `ProcessDeal`. +// This function returns a Response which can be twofold: +// - an approval, to confirm a correct deal +// - a complaint to announce an incorrect deal notifying others that the // dealer might be malicious. -// All Responses must be broadcasted to every verifiers and the dealer.
-// 3) The dealer can respond to each complaint by a justification revealing the -// share he originally sent out to the accusing verifier. This is done by -// calling `ProcessResponse` on the `Dealer`. -// 4) The verifiers refuse the shared secret and abort the protocol if there -// are at least t complaints OR if a Justification is wrong. The verifiers -// accept the shared secret if there are at least t approvals at which point -// any t out of n verifiers can reveal their shares to reconstruct the shared -// secret. +// All Responses must be broadcast to every verifier and the dealer. +// +// 3. The dealer can respond to each complaint by a justification revealing the +// share he originally sent out to the accusing verifier. This is done by +// calling `ProcessResponse` on the `Dealer`. +// +// 4. The verifiers refuse the shared secret and abort the protocol if there +// are at least t complaints OR if a Justification is wrong. The verifiers +// accept the shared secret if there are at least t approvals at which point +// any t out of n verifiers can reveal their shares to reconstruct the shared +// secret. package vss import ( @@ -129,10 +131,10 @@ type Justification struct { // NewDealer returns a Dealer capable of leading the secret sharing scheme. It // does not have to be trusted by other Verifiers. The security parameter t is -// the number of shares required to reconstruct the secret. It is HIGHLY -// RECOMMENDED to use a threshold higher or equal than what the method -// MinimumT() returns, otherwise it breaks the security assumptions of the whole -// scheme. It returns an error if the t is inferior or equal to 2. +// the number of shares required to reconstruct the secret. MinimumT() provides +// a middle ground between robustness and secrecy. Increasing t will increase +// the secrecy at the cost of decreased robustness and vice versa. It +// returns an error if t is less than or equal to 2. func NewDealer(suite Suite, longterm, secret kyber.Scalar, verifiers []kyber.Point, t int) (*Dealer, error) { d := &Dealer{ suite: suite, @@ -400,7 +402,7 @@ func (v *Verifier) ProcessEncryptedDeal(e *EncryptedDeal) (*Response, error) { r.Approved = false } - if err == errDealAlreadyProcessed { + if errors.Is(err, errDealAlreadyProcessed) { return nil, err } @@ -688,11 +690,11 @@ func (a *aggregator) UnsafeSetResponseDKG(idx uint32, approval bool) { a.addResponse(r) } -// MinimumT returns the minimum safe T that is proven to be secure with this -// protocol. It expects n, the total number of participants. -// WARNING: Setting a lower T could make -// the whole protocol insecure. Setting a higher T only makes it harder to -// reconstruct the secret. +// MinimumT returns a safe value of T that balances secrecy and robustness. +// It expects n, the total number of participants. +// T should be adjusted to your threat model. Setting a lower T decreases the +// difficulty for an adversary to break secrecy. However, too large a T makes +// it possible for an adversary to prevent recovery (robustness).
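A purely illustrative aside, not part of the diff: a tiny worked example of the (n + 1) / 2 default documented above and defined by the one-line function immediately below. minimumT here is a local copy, not the package function.

package main

import "fmt"

// minimumT mirrors the package formula below: t = (n + 1) / 2, integer division.
func minimumT(n int) int { return (n + 1) / 2 }

func main() {
	for _, n := range []int{5, 7, 10} {
		fmt.Println(n, "participants -> default t =", minimumT(n)) // 3, 4, 5
	}
	// Any t shares reconstruct the secret, so robustness needs t honest,
	// reachable holders; any coalition of t shares also learns it, so secrecy
	// tolerates at most t-1 colluders. Raising t trades robustness for secrecy.
}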
func MinimumT(n int) int { return (n + 1) / 2 } diff --git a/share/vss/rabin/vss_test.go b/share/vss/rabin/vss_test.go index 87c4c5fc3..1f33cb7f1 100644 --- a/share/vss/rabin/vss_test.go +++ b/share/vss/rabin/vss_test.go @@ -303,6 +303,7 @@ func TestVSSAggregatorVerifyJustification(t *testing.T) { d.SecShare.V = goodV j, err := dealer.ProcessResponse(resp) + assert.NoError(t, err) // invalid deal justified goodV = j.Deal.SecShare.V @@ -388,6 +389,7 @@ func TestVSSAggregatorVerifyResponse(t *testing.T) { // wrong index resp.Index = uint32(len(verifiersPub)) sig, err := schnorr.Sign(suite, v.longterm, resp.Hash(suite)) + assert.NoError(t, err) resp.Signature = sig assert.Error(t, aggr.verifyResponse(resp)) resp.Index = 0 diff --git a/shuffle/sequence_test.go b/shuffle/sequence_test.go new file mode 100644 index 000000000..2daaea940 --- /dev/null +++ b/shuffle/sequence_test.go @@ -0,0 +1,69 @@ +package shuffle + +import ( + "testing" + + "github.com/stretchr/testify/require" + "go.dedis.ch/kyber/v3" +) + +func TestAssertXY(t *testing.T) { + type tdata struct { + x [][]kyber.Point + y [][]kyber.Point + errStr string + } + + // express possible wrong cases and the expected errors + + table := []tdata{ + { + x: nil, + y: nil, + errStr: "X is empty", + }, + { + x: [][]kyber.Point{{}}, + y: [][]kyber.Point{{}}, + errStr: "X is empty", + }, + { + x: [][]kyber.Point{make([]kyber.Point, 1)}, + y: [][]kyber.Point{{}}, + errStr: "Y is empty", + }, + { + x: [][]kyber.Point{make([]kyber.Point, 1)}, + y: nil, + errStr: "Y is empty", + }, + { + x: [][]kyber.Point{make([]kyber.Point, 1), make([]kyber.Point, 2)}, + y: [][]kyber.Point{make([]kyber.Point, 1)}, + errStr: "X and Y have a different size: 2 != 1", + }, + { + x: [][]kyber.Point{make([]kyber.Point, 1)}, + y: [][]kyber.Point{make([]kyber.Point, 2)}, + errStr: "Y[0] has unexpected size: 1 != 2", + }, + { + x: [][]kyber.Point{make([]kyber.Point, 1), make([]kyber.Point, 2)}, + y: [][]kyber.Point{make([]kyber.Point, 1), make([]kyber.Point, 1)}, + errStr: "X[1] has unexpected size: 1 != 2", + }, + } + + for _, entry := range table { + err := assertXY(entry.x, entry.y) + require.EqualError(t, err, entry.errStr) + } + + // check valid data + + x := [][]kyber.Point{make([]kyber.Point, 2), make([]kyber.Point, 2)} + y := [][]kyber.Point{make([]kyber.Point, 2), make([]kyber.Point, 2)} + + err := assertXY(x, y) + require.NoError(t, err) +} diff --git a/shuffle/sequences.go b/shuffle/sequences.go new file mode 100644 index 000000000..9519f22ad --- /dev/null +++ b/shuffle/sequences.go @@ -0,0 +1,187 @@ +package shuffle + +import ( + "crypto/cipher" + "errors" + "fmt" + "math/big" + + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/proof" + "go.dedis.ch/kyber/v3/util/random" +) + +// SequencesShuffle shuffles a sequence of ElGamal pairs based on Section 5 of +// "Verifiable Mixing (Shuffling) of ElGamal Pairs" by Andrew Neff (April 2004) +// +// The function expects X and Y to have the same dimensions, with each row having +// the same length. It also expects X and Y to have at least one element. The +// function will panic if the expectations are not met. +// +// Dim X and Y: [<sequence length, j>, <number of sequences, i>] +// +// The number of rows defines the sequences' length. The number of columns +// defines the number of sequences. +// +// Seq 1 Seq 2 Seq 3 +// (0,0) (0,1) (0,2) +// (1,0) (1,1) (1,2) +// (2,0) (2,1) (2,2) +// +// # In the code coordinates are (j,i), where 0 ≤ j ≤ NQ-1, 0 ≤ i ≤ k-1 +// +// Last coordinate is (NQ-1, k-1) +// +// Variable names are as representative of the paper as possible.
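Outside the diff itself, a condensed, self-contained usage sketch of the function defined immediately below; it mirrors the sequenceShuffleTest added later in this change, with X and Y filled with random points purely for illustration:

package main

import (
	"fmt"

	"go.dedis.ch/kyber/v3"
	"go.dedis.ch/kyber/v3/group/edwards25519"
	"go.dedis.ch/kyber/v3/proof"
	"go.dedis.ch/kyber/v3/shuffle"
)

func main() {
	suite := edwards25519.NewBlakeSHA256Ed25519()
	rand := suite.RandomStream()

	h := suite.Scalar().Pick(rand)
	H := suite.Point().Mul(h, nil) // "server" ElGamal public key

	const NQ, k = 3, 4 // NQ pairs per sequence, k sequences

	// X and Y have dimension [NQ][k]: row j holds position j of every
	// sequence, column i is sequence i.
	X := make([][]kyber.Point, NQ)
	Y := make([][]kyber.Point, NQ)
	for j := 0; j < NQ; j++ {
		X[j] = make([]kyber.Point, k)
		Y[j] = make([]kyber.Point, k)
		for i := 0; i < k; i++ {
			X[j][i] = suite.Point().Mul(suite.Scalar().Pick(rand), nil)
			Y[j][i] = suite.Point().Mul(suite.Scalar().Pick(rand), nil)
		}
	}

	Xbar, Ybar, getProver := shuffle.SequencesShuffle(suite, nil, H, X, Y, rand)

	// Verifier-chosen challenges, one per row.
	e := make([]kyber.Scalar, NQ)
	for j := range e {
		e[j] = suite.Scalar().Pick(suite.RandomStream())
	}

	prover, err := getProver(e)
	if err != nil {
		panic(err)
	}
	prf, err := proof.HashProve(suite, "PairShuffle", prover)
	if err != nil {
		panic(err)
	}

	XUp, YUp, XDown, YDown := shuffle.GetSequenceVerifiable(suite, X, Y, Xbar, Ybar, e)
	verifier := shuffle.Verifier(suite, nil, H, XUp, YUp, XDown, YDown)
	fmt.Println(proof.HashVerify(suite, "PairShuffle", verifier, prf)) // <nil>
}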
+func SequencesShuffle(group kyber.Group, g, h kyber.Point, X, Y [][]kyber.Point, + rand cipher.Stream) (Xbar, Ybar [][]kyber.Point, getProver func(e []kyber.Scalar) ( + proof.Prover, error)) { + + err := assertXY(X, Y) + if err != nil { + panic(fmt.Sprintf("invalid data: %v", err)) + } + + NQ := len(X) + k := len(X[0]) + + // Pick a random permutation used in ALL k ElGamal sequences. The permutation + // (π) of an ElGamal pair at index i always outputs to the same index + pi := make([]int, k) + for i := 0; i < k; i++ { + pi[i] = i + } + + // Fisher–Yates shuffle + for i := k - 1; i > 0; i-- { + j := int(random.Int(big.NewInt(int64(i+1)), rand).Int64()) + if j != i { + pi[i], pi[j] = pi[j], pi[i] + } + } + + // Pick a fresh ElGamal blinding factor β(j, i) for each ElGamal sequence + // and each ElGamal pair + beta := make([][]kyber.Scalar, NQ) + for j := 0; j < NQ; j++ { + beta[j] = make([]kyber.Scalar, k) + for i := 0; i < k; i++ { + beta[j][i] = group.Scalar().Pick(rand) + } + } + + // Perform the Shuffle + Xbar = make([][]kyber.Point, NQ) + Ybar = make([][]kyber.Point, NQ) + + for j := 0; j < NQ; j++ { + Xbar[j] = make([]kyber.Point, k) + Ybar[j] = make([]kyber.Point, k) + + for i := 0; i < k; i++ { + Xbar[j][i] = group.Point().Mul(beta[j][pi[i]], g) + Xbar[j][i].Add(Xbar[j][i], X[j][pi[i]]) + + Ybar[j][i] = group.Point().Mul(beta[j][pi[i]], h) + Ybar[j][i].Add(Ybar[j][i], Y[j][pi[i]]) + } + } + + getProver = func(e []kyber.Scalar) (proof.Prover, error) { + // EGAR 2 (Prover) - Standard ElGamal k-shuffle proof: Knowledge of + // (XUp, YUp), (XDown, YDown) and e[j] + + ps := PairShuffle{} + ps.Init(group, k) + + if len(e) != NQ { + return nil, fmt.Errorf("len(e) must be equal to NQ: %d != %d", len(e), NQ) + } + + return func(ctx proof.ProverContext) error { + // Need to consolidate beta to a one dimensional array + beta2 := make([]kyber.Scalar, k) + + for i := 0; i < k; i++ { + beta2[i] = group.Scalar().Mul(e[0], beta[0][i]) + + for j := 1; j < NQ; j++ { + beta2[i] = group.Scalar().Add(beta2[i], + group.Scalar().Mul(e[j], beta[j][i])) + } + } + + XUp, YUp, _, _ := GetSequenceVerifiable(group, X, Y, Xbar, Ybar, e) + + return ps.Prove(pi, g, h, beta2, XUp, YUp, rand, ctx) + }, nil + } + + return Xbar, Ybar, getProver +} + +// assertXY checks that X, Y have the same dimensions and at least one element +func assertXY(X, Y [][]kyber.Point) error { + if len(X) == 0 || len(X[0]) == 0 { + return errors.New("X is empty") + } + if len(Y) == 0 || len(Y[0]) == 0 { + return errors.New("Y is empty") + } + + if len(X) != len(Y) { + return fmt.Errorf("X and Y have a different size: %d != %d", len(X), len(Y)) + } + + expected := len(X[0]) + + for i := range X { + if len(X[i]) != expected { + return fmt.Errorf("X[%d] has unexpected size: %d != %d", i, expected, len(X[i])) + } + if len(Y[i]) != expected { + return fmt.Errorf("Y[%d] has unexpected size: %d != %d", i, expected, len(Y[i])) + } + } + + return nil +} + +// GetSequenceVerifiable returns the consolidated input and output of sequence +// shuffling elements. Needed by the prover and verifier. 
+func GetSequenceVerifiable(group kyber.Group, X, Y, Xbar, Ybar [][]kyber.Point, e []kyber.Scalar) ( + XUp, YUp, XDown, YDown []kyber.Point) { + + // EGAR1 (Verifier) - Consolidate input and output + NQ := len(X) + k := len(X[0]) + + XUp = make([]kyber.Point, k) + YUp = make([]kyber.Point, k) + XDown = make([]kyber.Point, k) + YDown = make([]kyber.Point, k) + + for i := 0; i < k; i++ { + // No modification could be made for e[0] -> e[0] = 1 if one wanted - + // Remark 7 in the paper + XUp[i] = group.Point().Mul(e[0], X[0][i]) + YUp[i] = group.Point().Mul(e[0], Y[0][i]) + + XDown[i] = group.Point().Mul(e[0], Xbar[0][i]) + YDown[i] = group.Point().Mul(e[0], Ybar[0][i]) + + for j := 1; j < NQ; j++ { + XUp[i] = group.Point().Add(XUp[i], + group.Point().Mul(e[j], X[j][i])) + YUp[i] = group.Point().Add(YUp[i], + group.Point().Mul(e[j], Y[j][i])) + + XDown[i] = group.Point().Add(XDown[i], + group.Point().Mul(e[j], Xbar[j][i])) + YDown[i] = group.Point().Add(YDown[i], + group.Point().Mul(e[j], Ybar[j][i])) + } + } + + return XUp, YUp, XDown, YDown +} diff --git a/shuffle/shuffle_test.go b/shuffle/shuffle_test.go index c46fb1323..8eafc2466 100644 --- a/shuffle/shuffle_test.go +++ b/shuffle/shuffle_test.go @@ -10,14 +10,20 @@ import ( ) var k = 5 +var NQ = 6 var N = 10 -func TestShuffle(t *testing.T) { +func TestShufflePair(t *testing.T) { s := edwards25519.NewBlakeSHA256Ed25519WithRand(blake2xb.New(nil)) - shuffleTest(s, k, N) + pairShuffleTest(s, k, N) } -func shuffleTest(suite Suite, k, N int) { +func TestShuffleSequence(t *testing.T) { + s := edwards25519.NewBlakeSHA256Ed25519WithRand(blake2xb.New(nil)) + sequenceShuffleTest(s, k, NQ, N) +} + +func pairShuffleTest(suite Suite, k, N int) { rand := suite.RandomStream() // Create a "server" private/public keypair @@ -64,3 +70,80 @@ func shuffleTest(suite Suite, k, N int) { } } } + +func sequenceShuffleTest(suite Suite, k, NQ, N int) { + rand := suite.RandomStream() + + // Create a "server" private/public keypair + h := suite.Scalar().Pick(rand) + H := suite.Point().Mul(h, nil) + + // Create a set of ephemeral "client" keypairs to shuffle + c := make([]kyber.Scalar, k) + C := make([]kyber.Point, k) + + for i := 0; i < k; i++ { + c[i] = suite.Scalar().Pick(rand) + C[i] = suite.Point().Mul(c[i], nil) + } + + X := make([][]kyber.Point, NQ) + Y := make([][]kyber.Point, NQ) + + // generate random sequences + for i := 0; i < NQ; i++ { + xs := make([]kyber.Point, k) + ys := make([]kyber.Point, k) + + for j := 0; j < k; j++ { + xs[j] = suite.Point().Mul(suite.Scalar().Pick(suite.RandomStream()), nil) + ys[j] = suite.Point().Mul(suite.Scalar().Pick(suite.RandomStream()), nil) + } + + X[i] = xs + Y[i] = ys + } + + // ElGamal-encrypt all these keypairs with the "server" key + r := suite.Scalar() // temporary + for j := 0; j < NQ; j++ { + for i := 0; i < k; i++ { + r.Pick(rand) + X[j][i] = suite.Point().Mul(r, nil) + Y[j][i] = suite.Point().Mul(r, H) // ElGamal blinding factor + Y[j][i].Add(Y[j][i], C[i]) // Encrypted client public key + } + } + + // Repeat only the actual shuffle portion for test purposes. 
+ for i := 0; i < N; i++ { + + // Do a key-shuffle + XX, YY, getProver := SequencesShuffle(suite, nil, H, X, Y, rand) + + e := make([]kyber.Scalar, NQ) + for j := 0; j < NQ; j++ { + e[j] = suite.Scalar().Pick(suite.RandomStream()) + } + + prover, err := getProver(e) + if err != nil { + panic("failed to get prover: " + err.Error()) + } + + prf, err := proof.HashProve(suite, "PairShuffle", prover) + if err != nil { + panic("failed to hashProve: " + err.Error()) + } + + XXUp, YYUp, XXDown, YYDown := GetSequenceVerifiable(suite, X, Y, XX, YY, e) + + // Check it + verifier := Verifier(suite, nil, H, XXUp, YYUp, XXDown, YYDown) + + err = proof.HashVerify(suite, "PairShuffle", verifier, prf) + if err != nil { + panic("failed to hashVerify: " + err.Error()) + } + } +} diff --git a/shuffle/vartime_test.go b/shuffle/vartime_test.go index f996a7fd1..23d340154 100644 --- a/shuffle/vartime_test.go +++ b/shuffle/vartime_test.go @@ -11,9 +11,21 @@ func BenchmarkBiffleP256(b *testing.B) { } func Benchmark2PairShuffleP256(b *testing.B) { - shuffleTest(nist.NewBlakeSHA256P256(), 2, b.N) + pairShuffleTest(nist.NewBlakeSHA256P256(), 2, b.N) } func Benchmark10PairShuffleP256(b *testing.B) { - shuffleTest(nist.NewBlakeSHA256P256(), 10, b.N) + pairShuffleTest(nist.NewBlakeSHA256P256(), 10, b.N) +} + +func Benchmark2Pair2SeqShuffleP256(b *testing.B) { + sequenceShuffleTest(nist.NewBlakeSHA256P256(), 2, 2, b.N) +} + +func Benchmark2Pair10SeqShuffleP256(b *testing.B) { + sequenceShuffleTest(nist.NewBlakeSHA256P256(), 2, 10, b.N) +} + +func Benchmark10Pair10SeqShuffleP256(b *testing.B) { + sequenceShuffleTest(nist.NewBlakeSHA256P256(), 10, 10, b.N) } diff --git a/sign/bdn/bdn.go b/sign/bdn/bdn.go index 7f8aa78f9..b2b889909 100644 --- a/sign/bdn/bdn.go +++ b/sign/bdn/bdn.go @@ -70,13 +70,13 @@ func hashPointToR(pubs []kyber.Point) ([]kyber.Scalar, error) { // NewKeyPair creates a new BLS signing key pair. The private key x is a scalar // and the public key X is a point on curve G2. func NewKeyPair(suite pairing.Suite, random cipher.Stream) (kyber.Scalar, kyber.Point) { - return bls.NewKeyPair(suite, random) + return bls.NewSchemeOnG1(suite).NewKeyPair(random) } // Sign creates a BLS signature S = x * H(m) on a message m using the private // key x. The signature S is a point on curve G1. func Sign(suite pairing.Suite, x kyber.Scalar, msg []byte) ([]byte, error) { - return bls.Sign(suite, x, msg) + return bls.NewSchemeOnG1(suite).Sign(x, msg) } // Verify checks the given BLS signature S on the message m using the public @@ -84,7 +84,7 @@ func Sign(suite pairing.Suite, x kyber.Scalar, msg []byte) ([]byte, error) { // e(x*H(m), B2) == e(S, B2) holds where e is the pairing operation and B2 is // the base point from curve G2. 
func Verify(suite pairing.Suite, x kyber.Point, msg, sig []byte) error { - return bls.Verify(suite, x, msg, sig) + return bls.NewSchemeOnG1(suite).Verify(x, msg, sig) } // AggregateSignatures aggregates the signatures using a coefficient for each diff --git a/sign/bdn/bdn_test.go b/sign/bdn/bdn_test.go index d2542b458..a1aa20076 100644 --- a/sign/bdn/bdn_test.go +++ b/sign/bdn/bdn_test.go @@ -6,14 +6,13 @@ import ( "github.com/stretchr/testify/require" "go.dedis.ch/kyber/v3" - "go.dedis.ch/kyber/v3/pairing" "go.dedis.ch/kyber/v3/pairing/bn256" "go.dedis.ch/kyber/v3/sign" "go.dedis.ch/kyber/v3/sign/bls" "go.dedis.ch/kyber/v3/util/random" ) -var suite = pairing.NewSuiteBn256() +var suite = bn256.NewSuiteBn256() var two = suite.Scalar().Add(suite.Scalar().One(), suite.Scalar().One()) var three = suite.Scalar().Add(two, suite.Scalar().One()) @@ -124,8 +123,8 @@ func TestBDN_RogueAttack(t *testing.T) { require.NoError(t, err) // Old scheme not resistant to the attack - agg := bls.AggregatePublicKeys(suite, pubs...) - require.NoError(t, bls.Verify(suite, agg, msg, sig)) + agg := bls.NewSchemeOnG1(suite).AggregatePublicKeys(pubs...) + require.NoError(t, bls.NewSchemeOnG1(suite).Verify(agg, msg, sig)) // New scheme that should detect mask, _ := sign.NewMask(suite, pubs, nil) diff --git a/sign/bls/bls.go b/sign/bls/bls.go index f2abb3c0a..f63d11d8f 100644 --- a/sign/bls/bls.go +++ b/sign/bls/bls.go @@ -19,42 +19,65 @@ import ( "go.dedis.ch/kyber/v3" "go.dedis.ch/kyber/v3/pairing" + "go.dedis.ch/kyber/v3/sign" ) -type hashablePoint interface { - Hash([]byte) kyber.Point +type scheme struct { + sigGroup kyber.Group + keyGroup kyber.Group + pairing func(signature, public, hashedPoint kyber.Point) bool } // NewKeyPair creates a new BLS signing key pair. The private key x is a scalar // and the public key X is a point on curve G2. -func NewKeyPair(suite pairing.Suite, random cipher.Stream) (kyber.Scalar, kyber.Point) { - x := suite.G2().Scalar().Pick(random) - X := suite.G2().Point().Mul(x, nil) - return x, X +func (s *scheme) NewKeyPair(random cipher.Stream) (kyber.Scalar, kyber.Point) { + secret := s.keyGroup.Scalar().Pick(random) + public := s.keyGroup.Point().Mul(secret, nil) + return secret, public } // Sign creates a BLS signature S = x * H(m) on a message m using the private // key x. The signature S is a point on curve G1. -func Sign(suite pairing.Suite, x kyber.Scalar, msg []byte) ([]byte, error) { - hashable, ok := suite.G1().Point().(hashablePoint) +func (s *scheme) Sign(private kyber.Scalar, msg []byte) ([]byte, error) { + hashable, ok := s.sigGroup.Point().(kyber.HashablePoint) if !ok { return nil, errors.New("point needs to implement hashablePoint") } HM := hashable.Hash(msg) - xHM := HM.Mul(x, HM) + xHM := HM.Mul(private, HM) - s, err := xHM.MarshalBinary() + sig, err := xHM.MarshalBinary() if err != nil { return nil, err } - return s, nil + return sig, nil +} + +// Verify checks the given BLS signature S on the message m using the public +// key X by verifying that the equality e(H(m), X) == e(H(m), x*B2) == +// e(x*H(m), B2) == e(S, B2) holds where e is the pairing operation and B2 is +// the base point from curve G2. 
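The equality spelled out in the comment above is what the scheme's pairing closure (see NewSchemeOnG1/NewSchemeOnG2 further down) evaluates through the new Suite.ValidatePairing helper. A minimal standalone sketch of that check for the G1-signature layout, not part of the changeset and using only the bn256 suite from this repository:

package main

import (
	"fmt"

	"go.dedis.ch/kyber/v3"
	"go.dedis.ch/kyber/v3/pairing/bn256"
	"go.dedis.ch/kyber/v3/util/random"
)

func main() {
	suite := bn256.NewSuite()
	msg := []byte("hello")

	// Key pair on G2, signature on G1 (the layout used by NewSchemeOnG1).
	x := suite.G2().Scalar().Pick(random.New())
	X := suite.G2().Point().Mul(x, nil)

	HM := suite.G1().Point().(kyber.HashablePoint).Hash(msg)
	S := suite.G1().Point().Mul(x, HM) // S = x * H(m)

	// e(H(m), X) == e(S, B2), i.e. ValidatePairing(H(m), X, S, B2).
	B2 := suite.G2().Point().Base()
	fmt.Println(suite.ValidatePairing(HM, X, S, B2)) // true
}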
+func (s *scheme) Verify(X kyber.Point, msg, sig []byte) error { + hashable, ok := s.sigGroup.Point().(kyber.HashablePoint) + if !ok { + return errors.New("bls: point needs to implement hashablePoint") + } + HM := hashable.Hash(msg) + sigPoint := s.sigGroup.Point() + if err := sigPoint.UnmarshalBinary(sig); err != nil { + return err + } + if !s.pairing(X, HM, sigPoint) { + return errors.New("bls: invalid signature") + } + return nil } // AggregateSignatures combines signatures created using the Sign function -func AggregateSignatures(suite pairing.Suite, sigs ...[]byte) ([]byte, error) { - sig := suite.G1().Point() +func (s *scheme) AggregateSignatures(sigs ...[]byte) ([]byte, error) { + sig := s.sigGroup.Point() for _, sigBytes := range sigs { - sigToAdd := suite.G1().Point() + sigToAdd := s.sigGroup.Point() if err := sigToAdd.UnmarshalBinary(sigBytes); err != nil { return nil, err } @@ -65,8 +88,8 @@ func AggregateSignatures(suite pairing.Suite, sigs ...[]byte) ([]byte, error) { // AggregatePublicKeys takes a slice of public G2 points and returns // the sum of those points. This is used to verify multisignatures. -func AggregatePublicKeys(suite pairing.Suite, Xs ...kyber.Point) kyber.Point { - aggregated := suite.G2().Point() +func (s *scheme) AggregatePublicKeys(Xs ...kyber.Point) kyber.Point { + aggregated := s.keyGroup.Point() for _, X := range Xs { aggregated.Add(aggregated, X) } @@ -91,7 +114,7 @@ func BatchVerify(suite pairing.Suite, publics []kyber.Point, msgs [][]byte, sig var aggregatedLeft kyber.Point for i := range msgs { - hashable, ok := suite.G1().Point().(hashablePoint) + hashable, ok := suite.G1().Point().(kyber.HashablePoint) if !ok { return errors.New("bls: point needs to implement hashablePoint") } @@ -112,26 +135,34 @@ func BatchVerify(suite pairing.Suite, publics []kyber.Point, msgs [][]byte, sig return nil } -// Verify checks the given BLS signature S on the message m using the public -// key X by verifying that the equality e(H(m), X) == e(H(m), x*B2) == -// e(x*H(m), B2) == e(S, B2) holds where e is the pairing operation and B2 is -// the base point from curve G2. 
-func Verify(suite pairing.Suite, X kyber.Point, msg, sig []byte) error { - hashable, ok := suite.G1().Point().(hashablePoint) - if !ok { - return errors.New("bls: point needs to implement hashablePoint") +// NewSchemeOnG1 returns a sign.Scheme that uses G1 for its signature space and G2 +// for its public keys +func NewSchemeOnG1(suite pairing.Suite) sign.AggregatableScheme { + sigGroup := suite.G1() + keyGroup := suite.G2() + pairing := func(public, hashedMsg, sigPoint kyber.Point) bool { + return suite.ValidatePairing(hashedMsg, public, sigPoint, keyGroup.Point().Base()) } - HM := hashable.Hash(msg) - left := suite.Pair(HM, X) - s := suite.G1().Point() - if err := s.UnmarshalBinary(sig); err != nil { - return err + return &scheme{ + sigGroup: sigGroup, + keyGroup: keyGroup, + pairing: pairing, } - right := suite.Pair(s, suite.G2().Point().Base()) - if !left.Equal(right) { - return errors.New("bls: invalid signature") +} + +// NewSchemeOnG2 returns a sign.Scheme that uses G2 for its signature space and +// G1 for its public key +func NewSchemeOnG2(suite pairing.Suite) sign.AggregatableScheme { + sigGroup := suite.G2() + keyGroup := suite.G1() + pairing := func(public, hashedMsg, sigPoint kyber.Point) bool { + return suite.ValidatePairing(public, hashedMsg, keyGroup.Point().Base(), sigPoint) + } + return &scheme{ + sigGroup: sigGroup, + keyGroup: keyGroup, + pairing: pairing, } - return nil } func distinct(msgs [][]byte) bool { diff --git a/sign/bls/bls_test.go b/sign/bls/bls_test.go deleted file mode 100644 index 276b42219..000000000 --- a/sign/bls/bls_test.go +++ /dev/null @@ -1,270 +0,0 @@ -package bls - -import ( - "crypto/rand" - "testing" - - "github.com/stretchr/testify/require" - "go.dedis.ch/kyber/v3" - "go.dedis.ch/kyber/v3/pairing" - "go.dedis.ch/kyber/v3/pairing/bls12381" - "go.dedis.ch/kyber/v3/pairing/bn256" - "go.dedis.ch/kyber/v3/util/random" -) - -func TestBn256(t *testing.T) { - suite := bn256.NewSuite() - testBLS(t, suite) - testBLSFailSig(t, suite) - testBLSFailKey(t, suite) - testBLSAggregateSignatures(t, suite) - testBLSFailAggregatedSig(t, suite) - testBLSFailAggregatedKey(t, suite) - testBLSBatchVerify(t, suite) - testBLSFailBatchVerify(t, suite) - testBinaryMarshalAfterAggregationIssue400(t, suite) -} - -func TestBLS12381(t *testing.T) { - suite := bls12381.NewSuite() - testBLS(t, suite) - testBLSFailSig(t, suite) - testBLSFailKey(t, suite) - testBLSAggregateSignatures(t, suite) - testBLSFailAggregatedSig(t, suite) - testBLSFailAggregatedKey(t, suite) - testBLSBatchVerify(t, suite) - testBLSFailBatchVerify(t, suite) - testBinaryMarshalAfterAggregationIssue400(t, suite) -} - -func testBLS(t *testing.T, suite pairing.Suite) { - msg := []byte("Hello Boneh-Lynn-Shacham") - private, public := NewKeyPair(suite, random.New()) - sig, err := Sign(suite, private, msg) - require.Nil(t, err) - err = Verify(suite, public, msg, sig) - require.Nil(t, err) -} - -func testBLSFailSig(t *testing.T, suite pairing.Suite) { - msg := []byte("Hello Boneh-Lynn-Shacham") - private, public := NewKeyPair(suite, random.New()) - sig, err := Sign(suite, private, msg) - require.Nil(t, err) - sig[0] ^= 0x01 - if Verify(suite, public, msg, sig) == nil { - t.Fatal("bls: verification succeeded unexpectedly") - } -} - -func testBLSFailKey(t *testing.T, suite pairing.Suite) { - msg := []byte("Hello Boneh-Lynn-Shacham") - private, _ := NewKeyPair(suite, random.New()) - sig, err := Sign(suite, private, msg) - require.Nil(t, err) - _, public := NewKeyPair(suite, random.New()) - if Verify(suite, public, 
msg, sig) == nil { - t.Fatal("bls: verification succeeded unexpectedly") - } -} - -func testBLSAggregateSignatures(t *testing.T, suite pairing.Suite) { - msg := []byte("Hello Boneh-Lynn-Shacham") - private1, public1 := NewKeyPair(suite, random.New()) - private2, public2 := NewKeyPair(suite, random.New()) - sig1, err := Sign(suite, private1, msg) - require.Nil(t, err) - sig2, err := Sign(suite, private2, msg) - require.Nil(t, err) - aggregatedSig, err := AggregateSignatures(suite, sig1, sig2) - require.Nil(t, err) - - aggregatedKey := AggregatePublicKeys(suite, public1, public2) - - err = Verify(suite, aggregatedKey, msg, aggregatedSig) - require.Nil(t, err) -} - -func testBLSFailAggregatedSig(t *testing.T, suite pairing.Suite) { - msg := []byte("Hello Boneh-Lynn-Shacham") - private1, public1 := NewKeyPair(suite, random.New()) - private2, public2 := NewKeyPair(suite, random.New()) - sig1, err := Sign(suite, private1, msg) - require.Nil(t, err) - sig2, err := Sign(suite, private2, msg) - require.Nil(t, err) - aggregatedSig, err := AggregateSignatures(suite, sig1, sig2) - require.Nil(t, err) - aggregatedKey := AggregatePublicKeys(suite, public1, public2) - - aggregatedSig[0] ^= 0x01 - if Verify(suite, aggregatedKey, msg, aggregatedSig) == nil { - t.Fatal("bls: verification succeeded unexpectedly") - } -} -func testBLSFailAggregatedKey(t *testing.T, suite pairing.Suite) { - msg := []byte("Hello Boneh-Lynn-Shacham") - private1, public1 := NewKeyPair(suite, random.New()) - private2, public2 := NewKeyPair(suite, random.New()) - _, public3 := NewKeyPair(suite, random.New()) - sig1, err := Sign(suite, private1, msg) - require.Nil(t, err) - sig2, err := Sign(suite, private2, msg) - require.Nil(t, err) - aggregatedSig, err := AggregateSignatures(suite, sig1, sig2) - require.Nil(t, err) - badAggregatedKey := AggregatePublicKeys(suite, public1, public2, public3) - - if Verify(suite, badAggregatedKey, msg, aggregatedSig) == nil { - t.Fatal("bls: verification succeeded unexpectedly") - } -} -func testBLSBatchVerify(t *testing.T, suite pairing.Suite) { - msg1 := []byte("Hello Boneh-Lynn-Shacham") - msg2 := []byte("Hello Dedis & Boneh-Lynn-Shacham") - private1, public1 := NewKeyPair(suite, random.New()) - private2, public2 := NewKeyPair(suite, random.New()) - sig1, err := Sign(suite, private1, msg1) - require.Nil(t, err) - sig2, err := Sign(suite, private2, msg2) - require.Nil(t, err) - aggregatedSig, err := AggregateSignatures(suite, sig1, sig2) - require.Nil(t, err) - - err = BatchVerify(suite, []kyber.Point{public1, public2}, [][]byte{msg1, msg2}, aggregatedSig) - require.Nil(t, err) -} -func testBLSFailBatchVerify(t *testing.T, suite pairing.Suite) { - msg1 := []byte("Hello Boneh-Lynn-Shacham") - msg2 := []byte("Hello Dedis & Boneh-Lynn-Shacham") - private1, public1 := NewKeyPair(suite, random.New()) - private2, public2 := NewKeyPair(suite, random.New()) - sig1, err := Sign(suite, private1, msg1) - require.Nil(t, err) - sig2, err := Sign(suite, private2, msg2) - require.Nil(t, err) - - t.Run("fails with a bad signature", func(t *testing.T) { - aggregatedSig, err := AggregateSignatures(suite, sig1, sig2) - require.Nil(t, err) - msg2[0] ^= 0x01 - if BatchVerify(suite, []kyber.Point{public1, public2}, [][]byte{msg1, msg2}, aggregatedSig) == nil { - t.Fatal("bls: verification succeeded unexpectedly") - } - }) - - t.Run("fails with a duplicate msg", func(t *testing.T) { - private3, public3 := NewKeyPair(suite, random.New()) - sig3, err := Sign(suite, private3, msg1) - require.Nil(t, err) - aggregatedSig, 
err := AggregateSignatures(suite, sig1, sig2, sig3) - require.Nil(t, err) - - if BatchVerify(suite, []kyber.Point{public1, public2, public3}, [][]byte{msg1, msg2, msg1}, aggregatedSig) == nil { - t.Fatal("bls: verification succeeded unexpectedly") - } - }) - -} -func testBinaryMarshalAfterAggregationIssue400(t *testing.T, suite pairing.Suite) { - _, public1 := NewKeyPair(suite, random.New()) - _, public2 := NewKeyPair(suite, random.New()) - - workingKey := AggregatePublicKeys(suite, public1, public2, public1) - - workingBits, err := workingKey.MarshalBinary() - require.Nil(t, err) - - workingPoint := suite.G2().Point() - err = workingPoint.UnmarshalBinary(workingBits) - require.Nil(t, err) - - // this was failing before the fix - aggregatedKey := AggregatePublicKeys(suite, public1, public1, public2) - - bits, err := aggregatedKey.MarshalBinary() - require.Nil(t, err) - - point := suite.G2().Point() - err = point.UnmarshalBinary(bits) - require.Nil(t, err) -} - -func BenchmarkBLSKeyCreation(b *testing.B) { - suite := bn256.NewSuite() - b.ResetTimer() - for i := 0; i < b.N; i++ { - NewKeyPair(suite, random.New()) - } -} - -func BenchmarkBLSSign(b *testing.B) { - suite := bn256.NewSuite() - private, _ := NewKeyPair(suite, random.New()) - msg := []byte("Hello many times Boneh-Lynn-Shacham") - b.ResetTimer() - for i := 0; i < b.N; i++ { - Sign(suite, private, msg) - } -} - -func BenchmarkBLSAggregateSigs(b *testing.B) { - suite := bn256.NewSuite() - private1, _ := NewKeyPair(suite, random.New()) - private2, _ := NewKeyPair(suite, random.New()) - msg := []byte("Hello many times Boneh-Lynn-Shacham") - sig1, err := Sign(suite, private1, msg) - require.Nil(b, err) - sig2, err := Sign(suite, private2, msg) - require.Nil(b, err) - - b.ResetTimer() - for i := 0; i < b.N; i++ { - AggregateSignatures(suite, sig1, sig2) - } -} - -func BenchmarkBLSVerifyAggregate(b *testing.B) { - suite := bn256.NewSuite() - private1, public1 := NewKeyPair(suite, random.New()) - private2, public2 := NewKeyPair(suite, random.New()) - msg := []byte("Hello many times Boneh-Lynn-Shacham") - sig1, err := Sign(suite, private1, msg) - require.Nil(b, err) - sig2, err := Sign(suite, private2, msg) - require.Nil(b, err) - sig, err := AggregateSignatures(suite, sig1, sig2) - key := AggregatePublicKeys(suite, public1, public2) - b.ResetTimer() - for i := 0; i < b.N; i++ { - Verify(suite, key, msg, sig) - } -} - -func BenchmarkBLSVerifyBatchVerify(b *testing.B) { - suite := bn256.NewSuite() - - numSigs := 100 - privates := make([]kyber.Scalar, numSigs) - publics := make([]kyber.Point, numSigs) - msgs := make([][]byte, numSigs) - sigs := make([][]byte, numSigs) - for i := 0; i < numSigs; i++ { - private, public := NewKeyPair(suite, random.New()) - privates[i] = private - publics[i] = public - msg := make([]byte, 64, 64) - rand.Read(msg) - msgs[i] = msg - sig, err := Sign(suite, private, msg) - require.Nil(b, err) - sigs[i] = sig - } - - b.ResetTimer() - for i := 0; i < b.N; i++ { - aggregateSig, _ := AggregateSignatures(suite, sigs...) 
- BatchVerify(suite, publics, msgs, aggregateSig) - } -} diff --git a/sign/mask_test.go b/sign/mask_test.go index aa86d7c14..84a4d24eb 100644 --- a/sign/mask_test.go +++ b/sign/mask_test.go @@ -6,13 +6,13 @@ import ( "github.com/stretchr/testify/require" "go.dedis.ch/kyber/v3" - "go.dedis.ch/kyber/v3/pairing" + "go.dedis.ch/kyber/v3/pairing/bn256" "go.dedis.ch/kyber/v3/util/key" ) const n = 17 -var suite = pairing.NewSuiteBn256() +var suite = bn256.NewSuiteBn256() var publics []kyber.Point func init() { diff --git a/sign/sign.go b/sign/sign.go new file mode 100644 index 000000000..0ef296813 --- /dev/null +++ b/sign/sign.go @@ -0,0 +1,36 @@ +package sign + +import ( + "crypto/cipher" + + "go.dedis.ch/kyber/v3" + "go.dedis.ch/kyber/v3/share" +) + +// Scheme is the minimal interface for a signature scheme. +// Implemented by BLS and TBLS +type Scheme interface { + NewKeyPair(random cipher.Stream) (kyber.Scalar, kyber.Point) + Sign(private kyber.Scalar, msg []byte) ([]byte, error) + Verify(public kyber.Point, msg, sig []byte) error +} + +// AggregatableScheme is an interface allowing to aggregate signatures and +// public keys to efficient verification. +type AggregatableScheme interface { + Scheme + AggregateSignatures(sigs ...[]byte) ([]byte, error) + AggregatePublicKeys(Xs ...kyber.Point) kyber.Point +} + +// ThresholdScheme is a threshold signature scheme that issues partial +// signatures and can recover a "full" signature. It is implemented by the tbls +// package. +// TODO: see any potential conflict or synergy with mask and policy +type ThresholdScheme interface { + Sign(private *share.PriShare, msg []byte) ([]byte, error) + IndexOf(signature []byte) (int, error) + Recover(public *share.PubPoly, msg []byte, sigs [][]byte, t, n int) ([]byte, error) + VerifyPartial(public *share.PubPoly, msg, sig []byte) error + VerifyRecovered(public kyber.Point, msg, sig []byte) error +} diff --git a/sign/tbls/tbls.go b/sign/tbls/tbls.go index 8d12511e3..d24baa5b5 100644 --- a/sign/tbls/tbls.go +++ b/sign/tbls/tbls.go @@ -13,9 +13,12 @@ package tbls import ( "bytes" "encoding/binary" + "errors" + "go.dedis.ch/kyber/v3" "go.dedis.ch/kyber/v3/pairing" "go.dedis.ch/kyber/v3/share" + "go.dedis.ch/kyber/v3/sign" "go.dedis.ch/kyber/v3/sign/bls" ) @@ -24,6 +27,12 @@ import ( // share's value. The signature share Si is a point on curve G1. type SigShare []byte +type scheme struct { + keyGroup kyber.Group + sigGroup kyber.Group + sign.Scheme +} + // Index returns the index i of the TBLS share Si. func (s SigShare) Index() (int, error) { var index uint16 @@ -42,32 +51,63 @@ func (s *SigShare) Value() []byte { // Sign creates a threshold BLS signature Si = xi * H(m) on the given message m // using the provided secret key share xi. 
-func Sign(suite pairing.Suite, private *share.PriShare, msg []byte) ([]byte, error) { +func (s *scheme) Sign(private *share.PriShare, msg []byte) ([]byte, error) { buf := new(bytes.Buffer) if err := binary.Write(buf, binary.BigEndian, uint16(private.I)); err != nil { return nil, err } - s, err := bls.Sign(suite, private.V, msg) + sig, err := s.Scheme.Sign(private.V, msg) if err != nil { return nil, err } - if err := binary.Write(buf, binary.BigEndian, s); err != nil { + if err := binary.Write(buf, binary.BigEndian, sig); err != nil { return nil, err } return buf.Bytes(), nil } -// Verify checks the given threshold BLS signature Si on the message m using +// NewThresholdSchemeOnG1 returns a threshold scheme that computes BLS signatures +// on G1 +func NewThresholdSchemeOnG1(suite pairing.Suite) sign.ThresholdScheme { + return &scheme{ + keyGroup: suite.G2(), + sigGroup: suite.G1(), + Scheme: bls.NewSchemeOnG1(suite), + } +} + +// NewThresholdSchemeOnG2 returns a threshold scheme that computes BLS signatures +// on G2 +func NewThresholdSchemeOnG2(suite pairing.Suite) sign.ThresholdScheme { + return &scheme{ + keyGroup: suite.G1(), + sigGroup: suite.G2(), + Scheme: bls.NewSchemeOnG2(suite), + } +} + +func (s *scheme) IndexOf(signature []byte) (int, error) { + if len(signature) != s.sigGroup.PointLen()+2 { + return -1, errors.New("invalid partial signature length") + } + return SigShare(signature).Index() +} + +// VerifyPartial checks the given threshold BLS signature Si on the message m using // the public key share Xi that is associated to the secret key share xi. This // public key share Xi can be computed by evaluating the public sharing // polynonmial at the share's index i. -func Verify(suite pairing.Suite, public *share.PubPoly, msg, sig []byte) error { - s := SigShare(sig) - i, err := s.Index() +func (s *scheme) VerifyPartial(public *share.PubPoly, msg, sig []byte) error { + sh := SigShare(sig) + i, err := sh.Index() if err != nil { return err } - return bls.Verify(suite, public.Eval(i).V, msg, s.Value()) + return s.Scheme.Verify(public.Eval(i).V, msg, sh.Value()) +} + +func (s *scheme) VerifyRecovered(public kyber.Point, msg, sig []byte) error { + return s.Scheme.Verify(public, msg, sig) } // Recover reconstructs the full BLS signature S = x * H(m) from a threshold t @@ -75,27 +115,30 @@ func Verify(suite pairing.Suite, public *share.PubPoly, msg, sig []byte) error { // can be verified through the regular BLS verification routine using the // shared public key X. The shared public key can be computed by evaluating the // public sharing polynomial at index 0.
-func Recover(suite pairing.Suite, public *share.PubPoly, msg []byte, sigs [][]byte, t, n int) ([]byte, error) { - pubShares := make([]*share.PubShare, 0) +func (s *scheme) Recover(public *share.PubPoly, msg []byte, sigs [][]byte, t, n int) ([]byte, error) { + var pubShares []*share.PubShare for _, sig := range sigs { - s := SigShare(sig) - i, err := s.Index() + sh := SigShare(sig) + i, err := sh.Index() if err != nil { - return nil, err + continue } - if err = bls.Verify(suite, public.Eval(i).V, msg, s.Value()); err != nil { - return nil, err + if err = s.Scheme.Verify(public.Eval(i).V, msg, sh.Value()); err != nil { + continue } - point := suite.G1().Point() - if err := point.UnmarshalBinary(s.Value()); err != nil { - return nil, err + point := s.sigGroup.Point() + if err := point.UnmarshalBinary(sh.Value()); err != nil { + continue } pubShares = append(pubShares, &share.PubShare{I: i, V: point}) if len(pubShares) >= t { break } } - commit, err := share.RecoverCommit(suite.G1(), pubShares, t, n) + if len(pubShares) < t { + return nil, errors.New("not enough valid partial signatures") + } + commit, err := share.RecoverCommit(s.sigGroup, pubShares, t, n) if err != nil { return nil, err } diff --git a/sign/tbls/tbls_test.go b/sign/tbls/tbls_test.go index 079556252..2256ca9b6 100644 --- a/sign/tbls/tbls_test.go +++ b/sign/tbls/tbls_test.go @@ -3,40 +3,12 @@ package tbls import ( "testing" - "github.com/stretchr/testify/require" - "go.dedis.ch/kyber/v3/pairing" - "go.dedis.ch/kyber/v3/pairing/bls12381" "go.dedis.ch/kyber/v3/pairing/bn256" - "go.dedis.ch/kyber/v3/share" - "go.dedis.ch/kyber/v3/sign/bls" + "go.dedis.ch/kyber/v3/sign/test" ) -func TestBn256(test *testing.T) { +func TestBN256(t *testing.T) { suite := bn256.NewSuite() - testTBLS(test, suite) -} - -func TestBLS12381(test *testing.T) { - suite := bls12381.NewSuite() - testTBLS(test, suite) -} - -func testTBLS(test *testing.T, suite pairing.Suite) { - var err error - msg := []byte("Hello threshold Boneh-Lynn-Shacham") - n := 10 - t := n/2 + 1 - secret := suite.G1().Scalar().Pick(suite.RandomStream()) - priPoly := share.NewPriPoly(suite.G2(), t, secret, suite.RandomStream()) - pubPoly := priPoly.Commit(suite.G2().Point().Base()) - sigShares := make([][]byte, 0) - for _, x := range priPoly.Shares(n) { - sig, err := Sign(suite, x, msg) - require.Nil(test, err) - sigShares = append(sigShares, sig) - } - sig, err := Recover(suite, pubPoly, msg, sigShares, t, n) - require.Nil(test, err) - err = bls.Verify(suite, pubPoly.Commit(), msg, sig) - require.Nil(test, err) + scheme := NewThresholdSchemeOnG1(suite) + test.ThresholdTest(t, suite.G2(), scheme) } diff --git a/sign/test/bls_test.go b/sign/test/bls_test.go new file mode 100644 index 000000000..d2aae72c4 --- /dev/null +++ b/sign/test/bls_test.go @@ -0,0 +1,14 @@ +package test + +import ( + "testing" + + bls "go.dedis.ch/kyber/v3/pairing/bls12381/kilic" + sign "go.dedis.ch/kyber/v3/sign/bls" +) + +func TestBLS12381(t *testing.T) { + suite := bls.NewBLS12381Suite() + scheme := sign.NewSchemeOnG1(suite) + SchemeTesting(t, scheme) +} diff --git a/sign/test/scheme.go b/sign/test/scheme.go new file mode 100644 index 000000000..bb579ddcb --- /dev/null +++ b/sign/test/scheme.go @@ -0,0 +1,95 @@ +package test + +import ( + "testing" + + "github.com/stretchr/testify/require" + "go.dedis.ch/kyber/v3/sign" + "go.dedis.ch/kyber/v3/util/random" +) + +// SchemeTesting tests a scheme with simple checks +func SchemeTesting(t *testing.T, s sign.Scheme) { + t.Run("Regular signing & verifying", func(tt 
diff --git a/sign/test/scheme.go b/sign/test/scheme.go
new file mode 100644
index 000000000..bb579ddcb
--- /dev/null
+++ b/sign/test/scheme.go
@@ -0,0 +1,95 @@
+package test
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+	"go.dedis.ch/kyber/v3/sign"
+	"go.dedis.ch/kyber/v3/util/random"
+)
+
+// SchemeTesting tests a scheme with simple checks
+func SchemeTesting(t *testing.T, s sign.Scheme) {
+	t.Run("Regular signing & verifying", func(tt *testing.T) {
+		msg := []byte("Hello Boneh-Lynn-Shacham")
+		private, public := s.NewKeyPair(random.New())
+		sig, err := s.Sign(private, msg)
+		require.Nil(tt, err)
+		err = s.Verify(public, msg, sig)
+		require.Nil(tt, err)
+	})
+	t.Run("Invalid signature", func(tt *testing.T) {
+		msg := []byte("Hello Boneh-Lynn-Shacham")
+		private, public := s.NewKeyPair(random.New())
+		sig, err := s.Sign(private, msg)
+		require.Nil(tt, err)
+		sig[0] ^= 0x01
+		if s.Verify(public, msg, sig) == nil {
+			tt.Fatal("verification succeeded unexpectedly")
+		}
+	})
+	t.Run("Invalid Key", func(tt *testing.T) {
+		msg := []byte("Hello Boneh-Lynn-Shacham")
+		private, _ := s.NewKeyPair(random.New())
+		sig, err := s.Sign(private, msg)
+		require.Nil(tt, err)
+		_, public := s.NewKeyPair(random.New())
+		if s.Verify(public, msg, sig) == nil {
+			tt.Fatal("verification succeeded unexpectedly")
+		}
+	})
+}
+
+// AggregationTesting tests an aggregatable scheme
+func AggregationTesting(t *testing.T, s sign.AggregatableScheme) {
+	t.Run("Aggregation valid", func(tt *testing.T) {
+		msg := []byte("Hello Boneh-Lynn-Shacham")
+		private1, public1 := s.NewKeyPair(random.New())
+		private2, public2 := s.NewKeyPair(random.New())
+		sig1, err := s.Sign(private1, msg)
+		require.Nil(tt, err)
+		sig2, err := s.Sign(private2, msg)
+		require.Nil(tt, err)
+		aggregatedSig, err := s.AggregateSignatures(sig1, sig2)
+		require.Nil(tt, err)
+		aggregatedKey := s.AggregatePublicKeys(public1, public2)
+
+		err = s.Verify(aggregatedKey, msg, aggregatedSig)
+		require.Nil(tt, err)
+	})
+	t.Run("Aggregation with invalid sig", func(tt *testing.T) {
+		msg := []byte("Hello Boneh-Lynn-Shacham")
+		private1, public1 := s.NewKeyPair(random.New())
+		private2, public2 := s.NewKeyPair(random.New())
+		sig1, err := s.Sign(private1, msg)
+		require.Nil(tt, err)
+		sig2, err := s.Sign(private2, msg)
+		require.Nil(tt, err)
+		aggregatedSig, err := s.AggregateSignatures(sig1, sig2)
+		require.Nil(tt, err)
+		aggregatedKey := s.AggregatePublicKeys(public1, public2)
+
+		aggregatedSig[0] ^= 0x01
+		if s.Verify(aggregatedKey, msg, aggregatedSig) == nil {
+			tt.Fatal("bls: verification succeeded unexpectedly")
+		}
+	})
+
+	t.Run("Aggregation with invalid public", func(tt *testing.T) {
+		msg := []byte("Hello Boneh-Lynn-Shacham")
+		private1, public1 := s.NewKeyPair(random.New())
+		private2, public2 := s.NewKeyPair(random.New())
+		_, public3 := s.NewKeyPair(random.New())
+		sig1, err := s.Sign(private1, msg)
+		require.Nil(tt, err)
+		sig2, err := s.Sign(private2, msg)
+		require.Nil(tt, err)
+		aggregatedSig, err := s.AggregateSignatures(sig1, sig2)
+		require.Nil(tt, err)
+		badAggregatedKey := s.AggregatePublicKeys(public1, public2, public3)
+
+		if s.Verify(badAggregatedKey, msg, aggregatedSig) == nil {
+			tt.Fatal("bls: verification succeeded unexpectedly")
+		}
+	})
+}
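Where a concrete scheme supports aggregation, both helper groups can be driven from a single entry point. A minimal sketch, assuming sign.AggregatableScheme embeds sign.Scheme; the wrapper itself is hypothetical and not part of this patch:

package test

import (
	"testing"

	"go.dedis.ch/kyber/v3/sign"
)

// runSchemeAndAggregationChecks is a hypothetical convenience wrapper. It
// assumes sign.AggregatableScheme embeds sign.Scheme, so one scheme value
// can drive both test groups defined above.
func runSchemeAndAggregationChecks(t *testing.T, s sign.AggregatableScheme) {
	SchemeTesting(t, s)      // basic sign/verify checks
	AggregationTesting(t, s) // aggregated signatures and public keys
}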
diff --git a/sign/test/threshold.go b/sign/test/threshold.go
new file mode 100644
index 000000000..1329c9825
--- /dev/null
+++ b/sign/test/threshold.go
@@ -0,0 +1,97 @@
+package test
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/require"
+	"go.dedis.ch/kyber/v3"
+	"go.dedis.ch/kyber/v3/share"
+	"go.dedis.ch/kyber/v3/sign"
+	"go.dedis.ch/kyber/v3/util/random"
+)
+
+// ThresholdTest performs a simple check on a threshold scheme implementation
+func ThresholdTest(test *testing.T, keyGroup kyber.Group, scheme sign.ThresholdScheme) {
+	msg := []byte("Hello threshold Boneh-Lynn-Shacham")
+	n := 10
+	t := n/2 + 1
+	test.Run("Correct sharing and recovering", func(tt *testing.T) {
+		secret := keyGroup.Scalar().Pick(random.New())
+		priPoly := share.NewPriPoly(keyGroup, t, secret, random.New())
+		pubPoly := priPoly.Commit(keyGroup.Point().Base())
+		sigShares := make([][]byte, 0)
+		for _, x := range priPoly.Shares(n) {
+			sig, err := scheme.Sign(x, msg)
+			require.Nil(tt, err)
+			require.Nil(tt, scheme.VerifyPartial(pubPoly, msg, sig))
+			idx, err := scheme.IndexOf(sig)
+			require.NoError(tt, err)
+			require.Equal(tt, x.I, idx)
+			sigShares = append(sigShares, sig)
+		}
+		sig, err := scheme.Recover(pubPoly, msg, sigShares, t, n)
+		require.Nil(tt, err)
+		err = scheme.VerifyRecovered(pubPoly.Commit(), msg, sig)
+		require.Nil(tt, err)
+	})
+
+	test.Run("Invalid PublicKey", func(tt *testing.T) {
+		secret := keyGroup.Scalar().Pick(random.New())
+		priPoly := share.NewPriPoly(keyGroup, t, secret, random.New())
+		pubPoly := priPoly.Commit(keyGroup.Point().Base())
+		sigShares := make([][]byte, 0)
+		for _, x := range priPoly.Shares(n) {
+			sig, err := scheme.Sign(x, msg)
+			require.Nil(tt, err)
+			require.Nil(tt, scheme.VerifyPartial(pubPoly, msg, sig))
+			sigShares = append(sigShares, sig)
+		}
+		sig, err := scheme.Recover(pubPoly, msg, sigShares, t, n)
+		require.Nil(tt, err)
+		err = scheme.VerifyRecovered(keyGroup.Point().Pick(random.New()), msg, sig)
+		require.Error(tt, err)
+	})
+
+	test.Run("Invalid PartialSig", func(tt *testing.T) {
+		secret := keyGroup.Scalar().Pick(random.New())
+		priPoly := share.NewPriPoly(keyGroup, t, secret, random.New())
+		pubPoly := priPoly.Commit(keyGroup.Point().Base())
+		fakeSecret := keyGroup.Scalar().Pick(random.New())
+		fakePriPoly := share.NewPriPoly(keyGroup, t, fakeSecret, random.New())
+		for _, x := range fakePriPoly.Shares(n) {
+			sig, err := scheme.Sign(x, msg)
+			require.Nil(tt, err)
+			require.Error(tt, scheme.VerifyPartial(pubPoly, msg, sig))
+		}
+
+		weirdSig := []byte("ain't no sunshine when she's gone")
+		require.Error(tt, scheme.VerifyPartial(pubPoly, msg, weirdSig))
+		_, err := scheme.IndexOf(weirdSig)
+		require.Error(tt, err)
+		smallSig := []byte{1, 2, 3}
+		_, err = scheme.IndexOf(smallSig)
+		require.Error(tt, err)
+	})
+
+	test.Run("Invalid Recovered Sig", func(tt *testing.T) {
+		secret := keyGroup.Scalar().Pick(random.New())
+		priPoly := share.NewPriPoly(keyGroup, t, secret, random.New())
+		pubPoly := priPoly.Commit(keyGroup.Point().Base())
+		fakeSecret := keyGroup.Scalar().Pick(random.New())
+		fakePriPoly := share.NewPriPoly(keyGroup, t, fakeSecret, random.New())
+		fakeShares := fakePriPoly.Shares(n)
+		fakeSigShares := make([][]byte, 0)
+		fakePubPoly := fakePriPoly.Commit(keyGroup.Point().Base())
+		for i := 0; i < n; i++ {
+			fakeSig, _ := scheme.Sign(fakeShares[i], msg)
+			fakeSigShares = append(fakeSigShares, fakeSig)
+		}
+		fakeSig, err := scheme.Recover(fakePubPoly, msg, fakeSigShares, t, n)
+		require.Nil(tt, err)
+		err = scheme.VerifyRecovered(pubPoly.Commit(), msg, fakeSig)
+		require.Error(tt, err)
+	})
+}
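Outside of the test helpers, the new threshold API is used the same way: share a key, collect partial signatures, then recover and verify. A minimal sketch using only constructors and methods that appear in this diff; the standalone program itself is hypothetical:

package main

import (
	"fmt"

	"go.dedis.ch/kyber/v3/pairing/bn256"
	"go.dedis.ch/kyber/v3/share"
	"go.dedis.ch/kyber/v3/sign/tbls"
	"go.dedis.ch/kyber/v3/util/random"
)

func main() {
	suite := bn256.NewSuite()
	// Signatures on G1, so keys and the sharing polynomial live on G2.
	scheme := tbls.NewThresholdSchemeOnG1(suite)
	msg := []byte("Hello threshold Boneh-Lynn-Shacham")
	n := 10
	t := n/2 + 1

	secret := suite.G2().Scalar().Pick(random.New())
	priPoly := share.NewPriPoly(suite.G2(), t, secret, random.New())
	pubPoly := priPoly.Commit(suite.G2().Point().Base())

	// Each share holder produces one partial signature.
	partials := make([][]byte, 0, n)
	for _, x := range priPoly.Shares(n) {
		partial, err := scheme.Sign(x, msg)
		if err != nil {
			panic(err)
		}
		partials = append(partials, partial)
	}

	// Any t valid partials suffice to recover the full BLS signature.
	sig, err := scheme.Recover(pubPoly, msg, partials, t, n)
	if err != nil {
		panic(err)
	}
	fmt.Println(scheme.VerifyRecovered(pubPoly.Commit(), msg, sig) == nil) // true
}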
diff --git a/suites/all.go b/suites/all.go
index 616235d3e..bdb08e70a 100644
--- a/suites/all.go
+++ b/suites/all.go
@@ -3,7 +3,6 @@ package suites
 import (
 	"go.dedis.ch/kyber/v3/group/edwards25519"
 	"go.dedis.ch/kyber/v3/group/nist"
-	"go.dedis.ch/kyber/v3/pairing"
 	"go.dedis.ch/kyber/v3/pairing/bn256"
 )
 
@@ -15,7 +14,7 @@ func init() {
 	register(bn256.NewSuiteG1())
 	register(bn256.NewSuiteG2())
 	register(bn256.NewSuiteGT())
-	register(pairing.NewSuiteBn256())
+	register(bn256.NewSuiteBn256())
 	// This is a constant time implementation that should be
 	// used as much as possible
 	register(edwards25519.NewBlakeSHA256Ed25519())
diff --git a/util/encoding/encoding_test.go b/util/encoding/encoding_test.go
index f1a0cccdc..a7d230a64 100644
--- a/util/encoding/encoding_test.go
+++ b/util/encoding/encoding_test.go
@@ -2,6 +2,7 @@ package encoding
 
 import (
 	"bytes"
+	"io"
 	"testing"
 
 	"github.com/stretchr/testify/require"
@@ -60,3 +61,40 @@ func TestScalarHexString(t *testing.T) {
 	ErrFatal(err)
 	require.True(t, sc.Equal(s2))
 }
+
+// Tests for error cases
+type MockFailingReader struct {
+	data []byte
+}
+type MockEmptyReader struct {
+	data []byte
+}
+
+func (m *MockFailingReader) Read(p []byte) (n int, err error) {
+	return copy(p, m.data), io.EOF
+}
+func (m *MockEmptyReader) Read(p []byte) (n int, err error) {
+	return 0, nil
+}
+
+func TestReadHexPointErrorInvalidHexEnc(t *testing.T) {
+	// Test case: invalid hex encoding
+	reader := bytes.NewReader([]byte("invalidhex"))
+	_, err := ReadHexPoint(s, reader)
+	require.Error(t, err, "Expected error when reading invalid hex encoding, but got nil")
+}
+
+func TestReadHexPointErrorReaderFails(t *testing.T) {
+	// Test case: reader fails
+	mockReader1 := &MockFailingReader{data: []byte("abc")}
+	_, err := ReadHexPoint(s, mockReader1)
+	require.Error(t, err, "Expected error when reader fails, but got nil")
+}
+
+func TestReadHexPointErrorNotEnoughBytes(t *testing.T) {
+	// Test case: not enough bytes from stream
+	mockReader2 := &MockEmptyReader{data: []byte("abc")}
+	_, err := ReadHexPoint(s, mockReader2)
+	require.Error(t, err, "Expected error when not enough bytes from stream, but got nil")
+	require.EqualError(t, err, "didn't get enough bytes from stream", "Expected error message: didn't get enough bytes from stream, but got %s", err.Error())
+}
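The error-path tests above could be complemented by a happy-path check. A sketch of such a test; the test name is hypothetical and, unlike the tests in the patch, it builds its own edwards25519 suite instead of relying on the package-level suite:

package encoding

import (
	"encoding/hex"
	"strings"
	"testing"

	"github.com/stretchr/testify/require"
	"go.dedis.ch/kyber/v3/group/edwards25519"
)

// Hypothetical happy-path companion: hex-encode a valid point and make sure
// ReadHexPoint reads it back unchanged.
func TestReadHexPointValid(t *testing.T) {
	suite := edwards25519.NewBlakeSHA256Ed25519()
	p := suite.Point().Base()
	b, err := p.MarshalBinary()
	require.NoError(t, err)
	p2, err := ReadHexPoint(suite, strings.NewReader(hex.EncodeToString(b)))
	require.NoError(t, err)
	require.True(t, p.Equal(p2))
}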
diff --git a/util/random/rand_test.go b/util/random/rand_test.go
index 42b50ac03..36f62f76e 100644
--- a/util/random/rand_test.go
+++ b/util/random/rand_test.go
@@ -3,14 +3,18 @@ package random
 import (
 	"bytes"
 	"crypto/rand"
+	"fmt"
+	"math/big"
+	"strconv"
 	"strings"
 	"testing"
 )
 
 const size = 32
+const readerStream = "some io.Reader stream to be used for testing"
 
 func TestMixedEntropy(t *testing.T) {
-	r := strings.NewReader("some io.Reader stream to be used for testing")
+	r := strings.NewReader(readerStream)
 	cipher := New(r, rand.Reader)
 
 	src := make([]byte, size)
@@ -57,13 +61,12 @@ func TestCryptoOnly(t *testing.T) {
 }
 
 func TestUserOnly(t *testing.T) {
-	seed := "some io.Reader stream to be used for testing"
-	cipher1 := New(strings.NewReader(seed))
+	cipher1 := New(strings.NewReader(readerStream))
 	src := make([]byte, size)
 	copy(src, []byte("hello"))
 	dst1 := make([]byte, size)
 	cipher1.XORKeyStream(dst1, src)
-	cipher2 := New(strings.NewReader(seed))
+	cipher2 := New(strings.NewReader(readerStream))
 	dst2 := make([]byte, size)
 	cipher2.XORKeyStream(dst2, src)
 	if !bytes.Equal(dst1, dst2) {
@@ -84,3 +87,63 @@ func TestIncorrectSize(t *testing.T) {
 	dst := make([]byte, size+1)
 	cipher.XORKeyStream(dst, src)
 }
+
+func TestBits(t *testing.T) {
+	testCases := []struct {
+		bitlen uint // input bit length
+		exact  bool // whether the exact bit length should be enforced
+	}{
+		{bitlen: 128, exact: false},
+		{bitlen: 256, exact: true},
+		{bitlen: 512, exact: false},
+		{bitlen: 1024, exact: true},
+	}
+
+	for _, tc := range testCases {
+		t.Run(fmt.Sprintf("bitlen: %d exact: %s", tc.bitlen, strconv.FormatBool(tc.exact)), func(t *testing.T) {
+			r := strings.NewReader(readerStream)
+			cipher := New(r, rand.Reader)
+
+			bigIntBytes := Bits(tc.bitlen, tc.exact, cipher)
+			bigInt := new(big.Int).SetBytes(bigIntBytes)
+
+			// Check if the bit length matches the expected length
+			expectedBitLen := tc.bitlen
+			if tc.exact && uint(bigInt.BitLen()) != expectedBitLen {
+				t.Errorf("Generated BigInt with exact bits doesn't match the expected bit length: got %d, expected %d", bigInt.BitLen(), expectedBitLen)
+			} else if !tc.exact && uint(bigInt.BitLen()) > expectedBitLen {
+				t.Errorf("Generated BigInt with more bits than maximum bit length: got %d, expected at most %d", bigInt.BitLen(), expectedBitLen)
+			}
+		})
+	}
+}
+
+func TestInt(t *testing.T) {
+	testCases := []struct {
+		modulusBitLen uint // Bit length of the modulus
+	}{
+		{128},
+		{256},
+		{512},
+		{1024},
+	}
+
+	for _, tc := range testCases {
+		t.Run(fmt.Sprintf("modulusBitlen: %d", tc.modulusBitLen), func(t *testing.T) {
+			modulus, err := rand.Prime(rand.Reader, int(tc.modulusBitLen))
+			if err != nil {
+				t.Fatalf("Failed to generate random prime: %v", err)
+			}
+
+			r := strings.NewReader(readerStream)
+			cipher := New(r, rand.Reader)
+
+			randomInt := Int(modulus, cipher)
+
+			// Check if the generated BigInt is less than the modulus
+			if randomInt.Cmp(modulus) >= 0 {
+				t.Errorf("Generated BigInt %v is not less than the modulus %v", randomInt, modulus)
+			}
+		})
+	}
+}
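The TestInt cases above mirror the typical call site for these helpers. A minimal standalone sketch; the program and the 2^255 modulus are hypothetical, while random.New and random.Int are the functions exercised by the tests in this diff:

package main

import (
	"crypto/rand"
	"fmt"
	"math/big"

	"go.dedis.ch/kyber/v3/util/random"
)

func main() {
	// Hypothetical modulus: 2^255, just to have an upper bound.
	modulus := new(big.Int).Lsh(big.NewInt(1), 255)

	// Cipher stream backed by crypto/rand; the tests above additionally mix
	// in a fixed string reader for reproducibility.
	stream := random.New(rand.Reader)

	v := random.Int(modulus, stream) // uniform in [0, modulus)
	fmt.Println(v.Cmp(modulus) < 0)  // always true
}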