Stage 1 of Phase 2 – Nekryptology

This commit is contained in:
Cassandra Heart 2023-07-05 00:32:28 -05:00
parent d43a4aeac8
commit ff6715575f
No known key found for this signature in database
GPG Key ID: 6352152859385958
393 changed files with 95232 additions and 69439 deletions

View File

@ -1,16 +0,0 @@
# Build image for the ceremony client CLI.
FROM golang:1.18
WORKDIR /app
# Copy module manifests first so the dependency-download layer is cached
# independently of source changes.
COPY go.mod go.sum ./
RUN go mod download
# Add an entry to .bash_history so we can just run `make dev` and hit up to test the cli
RUN echo 'go run ./... test-voucher.hex' >> ~/.bash_history
COPY . .
# Static build: no cgo, so the binary has no libc dependency.
RUN CGO_ENABLED=0 GOOS=linux go build -o ceremony-client
# Exec form so the client runs as PID 1 and receives signals directly
# instead of being wrapped in a shell.
CMD ["./ceremony-client"]

View File

@ -1,13 +0,0 @@
IMAGE_TAG := quilibrium-ceremony-client

# These targets are commands, not files; declare them phony so a stray
# file named e.g. `bash` or `dev` can never mask them.
.PHONY: build-docker bash participate dev

# Build the client image.
build-docker:
	docker build -t $(IMAGE_TAG) .

# Interactive shell in a throwaway container.
bash:
	docker run --rm -it $(IMAGE_TAG) bash

# Run the ceremony client; the voucher lands in ./vouchers on the host.
participate: build-docker
	docker run --rm -it -v $(PWD)/vouchers:/vouchers $(IMAGE_TAG) ./ceremony-client "/vouchers/quil-voucher-$(shell date +'%m.%d.%y-%H:%M:%S').hex"

# Development shell with the working tree mounted in place.
dev:
	docker run --rm -it -v $(PWD):$(PWD) --workdir $(PWD) $(IMAGE_TAG) bash

View File

@ -1,13 +1,4 @@
# ceremonyclient
# monorepo
KZG Ceremony client for Quilibrium.
The Quilibrium monorepo is the combined collection of libraries which power the Quilibrium Protocol.
# Running
Run with `go run ./... <voucher_filename>` or omit the filename to write to quil_voucher.hex.
If you have docker installed you can participate in the ceremony by simply running `make participate`. Your voucher will be written to `vouchers/`.
## Additional Features
Run with `go run ./... verify-transcript` to verify the latest state of the sequencer, or `go run ./... check-voucher <voucher_filename>` to verify voucher inclusion in the latest state. Please keep in mind that voucher inclusion is not immediate after contribution: the current batch must be processed before it will appear, and if the sequencer returned an error response when you contributed, the voucher will not be included.

65808
basis.json

File diff suppressed because it is too large Load Diff

View File

@ -1,542 +0,0 @@
package main
import (
"bytes"
"crypto"
"crypto/rand"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"os"
"strings"
"sync"
"github.com/cloudflare/circl/sign/ed448"
"golang.org/x/sync/errgroup"
bls48581 "source.quilibrium.com/quilibrium/ceremonyclient/ec/bls48581"
)
// SEQUENCER_ACCEPTING is the JSON-encoded state string the sequencer
// reports while the contribution lobby is open.
const SEQUENCER_ACCEPTING = "\"ACCEPTING\""

// PowersOfTauJson is the wire form of the powers of tau: "0x"-prefixed
// hex strings for the G1 and G2 affine points.
type PowersOfTauJson struct {
	G1Affines []string `json:"G1Powers"`
	G2Affines []string `json:"G2Powers"`
}

// ContributionJson is the payload POSTed to the sequencer: the updated
// powers of tau plus this participant's pot and voucher public keys.
type ContributionJson struct {
	PowersOfTau   PowersOfTauJson `json:"powersOfTau"`
	PotPubKey     string          `json:"potPubKey"`
	VoucherPubKey string          `json:"voucherPubKey"`
}

// BatchContribution wraps a single decoded contribution.
type BatchContribution struct {
	Contribution Contribution
}

// PowersOfTau is the in-memory (decoded point) form of PowersOfTauJson.
type PowersOfTau struct {
	G1Affines []*bls48581.ECP
	G2Affines []*bls48581.ECP8
}

// CeremonyState is the sequencer's full transcript: current powers,
// the running witness, and every contributor's voucher public key.
type CeremonyState struct {
	PowersOfTau    PowersOfTauJson `json:"powersOfTau"`
	PotPubKey      string          `json:"potPubKey"`
	Witness        Witness         `json:"witness"`
	VoucherPubKeys []string        `json:"voucherPubKeys"`
}

// Witness links each contribution: runningProducts[i+1] should equal
// runningProducts[i] scaled by the secret behind potPubKeys[i+1].
type Witness struct {
	RunningProducts []string `json:"runningProducts"`
	PotPubKeys      []string `json:"potPubKeys"`
}

// Contribution is the decoded form of ContributionJson.
type Contribution struct {
	NumG1Powers int
	NumG2Powers int
	PowersOfTau PowersOfTau
	PotPubKey   *bls48581.ECP8
}

// Package-level session state for this CLI run:
// voucherPubKey/voucher — the ed448 keypair identifying this participant;
// secret — the toxic-waste scalar folded into the transcript;
// bcj — the transcript being mutated and then submitted.
var voucherPubKey ed448.PublicKey
var voucher ed448.PrivateKey
var secret *bls48581.BIG
var bcj *ContributionJson = &ContributionJson{}
// JoinLobby registers this participant with the sequencer. It lazily
// generates the ed448 voucher keypair on first call, signs the literal
// "JOIN" message, and POSTs the hex-encoded public key with the
// signature in the Authorization header. Panics on any failure,
// matching the error style of the rest of this CLI.
func JoinLobby() {
	var err error
	if voucherPubKey == nil {
		voucherPubKey, voucher, err = ed448.GenerateKey(rand.Reader)
		if err != nil {
			panic(err)
		}
	}

	sig, err := voucher.Sign(rand.Reader, []byte("JOIN"), ed448.SignerOptions{Hash: crypto.Hash(0), Scheme: ed448.ED448})
	if err != nil {
		panic(err)
	}

	reqHex := hex.EncodeToString(voucherPubKey)
	sigHex := hex.EncodeToString(sig)

	req, err := http.NewRequest("POST", HOST+"join", bytes.NewBuffer([]byte(reqHex)))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "text/plain")
	req.Header.Set("Authorization", sigHex)

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	// Bug fix: the original leaked the response body.
	defer resp.Body.Close()

	// Bug fix: the original printed this before checking err, claiming
	// success even when the request failed.
	fmt.Println("Connected to sequencer!")

	responseData, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	if resp.StatusCode != 200 {
		fmt.Printf("Status code %d given by sequencer: \n", resp.StatusCode)
		panic(string(responseData))
	}
}
// GetSequencerState asks the sequencer for its current lobby state
// (e.g. SEQUENCER_ACCEPTING) and returns the raw response body.
// Panics on transport errors or a non-200 response.
func GetSequencerState() string {
	req, err := http.NewRequest("POST", HOST+"sequencer_state", bytes.NewBuffer([]byte("{}")))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	// Bug fix: the original leaked the response body, preventing
	// connection reuse.
	defer resp.Body.Close()

	sequencerState, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	if resp.StatusCode != 200 {
		fmt.Printf("Status code %d given by sequencer: \n", resp.StatusCode)
		panic(string(sequencerState))
	}
	return string(sequencerState)
}
// Bootstrap draws a fresh random secret (reduced mod the curve order),
// fetches the current transcript from the sequencer into bcj, and folds
// the secret into it. Panics on any failure.
func Bootstrap() {
	secretBytes := make([]byte, 8*int(bls48581.MODBYTES))
	// Bug fix: the original ignored rand.Read's error; a failed read
	// would have silently produced a weak (partially zero) secret.
	if _, err := rand.Read(secretBytes); err != nil {
		panic(err)
	}
	secret = bls48581.FromBytes(secretBytes)
	secret.Mod(bls48581.NewBIGints(bls48581.CURVE_Order))

	bcjRes, err := http.DefaultClient.Post(HOST+"current_state", "application/json", bytes.NewBufferString("{}"))
	if err != nil {
		panic(err)
	}
	defer bcjRes.Body.Close()

	bcjBytes, err := io.ReadAll(bcjRes.Body)
	if err != nil {
		panic(err)
	}
	if bcjRes.StatusCode != 200 {
		fmt.Printf("Status code %d given by sequencer: \n", bcjRes.StatusCode)
		panic(string(bcjBytes))
	}

	if err := json.Unmarshal(bcjBytes, bcj); err != nil {
		// message is not conformant, we are in validating phase
		panic(err)
	}

	contributeWithSecrets(secret)
}
// contributeWithSecrets applies the participant secret to the transcript:
// first the 65536 powers of tau, then the witness/pubkey entries.
// It currently always returns nil; failures inside the helpers panic.
func contributeWithSecrets(secret *bls48581.BIG) error {
	updatePowersOfTau(secret)
	updateWitness(secret)
	return nil
}
var xi []*bls48581.BIG
// updatePowersOfTau multiplies the i-th G1 power (and, for i < 257, the
// i-th G2 power) of the transcript by secret^i, applying this
// participant's contribution to every power of tau in bcj.
//
// The powers xi[i] = secret^i mod r are computed serially, then the
// 65536 point multiplications each run in their own goroutine; goroutine
// i touches only index i of the affine slices, so no locking is needed.
func updatePowersOfTau(secret *bls48581.BIG) {
	// Hoisted loop invariant: the curve order is constant.
	order := bls48581.NewBIGints(bls48581.CURVE_Order)
	xi = append(xi, bls48581.NewBIGint(1))
	for i := 0; i < 65536; i++ {
		xi = append(xi, bls48581.Modmul(xi[i], secret, order))
	}

	wg := sync.WaitGroup{}
	wg.Add(65536)
	for i := 0; i < 65536; i++ {
		i := i
		go func() {
			// Bug fix: Done was not deferred; any panic below would
			// have left wg.Wait hanging forever had it been recovered.
			defer wg.Done()

			g1PowersString := strings.TrimPrefix(bcj.PowersOfTau.G1Affines[i], "0x")
			g1PowersHex, err := hex.DecodeString(g1PowersString)
			if err != nil {
				// Bug fix: the original silently ignored decode errors
				// and fed garbage bytes to the point decoder.
				panic(err)
			}
			g1Power := bls48581.ECP_fromBytes(g1PowersHex)
			if g1Power.Equals(bls48581.NewECP()) {
				panic("invalid g1Power")
			}
			g1Power = g1Power.Mul(xi[i])
			g1Power.ToBytes(g1PowersHex, true)
			bcj.PowersOfTau.G1Affines[i] = "0x" + hex.EncodeToString(g1PowersHex)

			// Only the first 257 G2 powers exist in the transcript.
			if i < 257 {
				g2PowersString := strings.TrimPrefix(bcj.PowersOfTau.G2Affines[i], "0x")
				g2PowersHex, err := hex.DecodeString(g2PowersString)
				if err != nil {
					panic(err)
				}
				g2Power := bls48581.ECP8_fromBytes(g2PowersHex)
				if g2Power.Equals(bls48581.NewECP8()) {
					panic("invalid g2Power")
				}
				g2Power = g2Power.Mul(xi[i])
				g2Power.ToBytes(g2PowersHex, true)
				bcj.PowersOfTau.G2Affines[i] = "0x" + hex.EncodeToString(g2PowersHex)
			}
		}()
	}
	wg.Wait()
}
// updateWitness multiplies the transcript's potPubKey by the participant
// secret and records this participant's voucher public key, both stored
// back into bcj as "0x"-prefixed hex.
func updateWitness(secret *bls48581.BIG) {
	g2PowersString := strings.TrimPrefix(bcj.PotPubKey, "0x")
	// NOTE(review): the decode error is ignored — a malformed PotPubKey
	// would flow garbage into the point decoder; confirm upstream input
	// is always validated.
	g2PowersHex, _ := hex.DecodeString(g2PowersString)
	g2Power := bls48581.ECP8_fromBytes(g2PowersHex)
	// x = 1 * secret mod r: the secret reduced into the scalar field.
	x := bls48581.Modmul(bls48581.NewBIGint(1), secret, bls48581.NewBIGints(bls48581.CURVE_Order))
	if g2Power.Equals(bls48581.NewECP8()) {
		panic("invalid g2Power")
	}
	g2Power = g2Power.Mul(x)
	g2Power.ToBytes(g2PowersHex, true)
	bcj.PotPubKey = "0x" + hex.EncodeToString(g2PowersHex)
	bcj.VoucherPubKey = "0x" + hex.EncodeToString(voucherPubKey)
}
// ContributeAndGetVoucher submits the updated transcript in bcj to the
// sequencer (signed with the voucher key over the new potPubKey) and,
// on success, persists the voucher private key as hex. The output file
// is os.Args[1] if given, otherwise quil_voucher.hex; if the file cannot
// be written the hex string is printed so the voucher is not lost.
func ContributeAndGetVoucher() {
	sendBytes, err := json.Marshal(bcj)
	if err != nil {
		panic(err)
	}

	req, err := http.NewRequest("POST", HOST+"contribute", bytes.NewBuffer(sendBytes))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")

	sig, err := voucher.Sign(rand.Reader, []byte(bcj.PotPubKey), ed448.SignerOptions{Hash: crypto.Hash(0), Scheme: ed448.ED448})
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", hex.EncodeToString(sig))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Bug fix: the original never inspected the response, so a rejected
	// contribution still wrote a (worthless) voucher file. Surface
	// sequencer errors before persisting anything.
	responseData, err := io.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	if resp.StatusCode != 200 {
		fmt.Printf("Status code %d given by sequencer: \n", resp.StatusCode)
		panic(string(responseData))
	}

	filename := "quil_voucher.hex"
	if len(os.Args) > 1 {
		filename = os.Args[1]
	} else {
		fmt.Println("Voucher file name not provided, writing to quil_voucher.hex")
	}
	if err := os.WriteFile(filename, []byte(hex.EncodeToString(voucher)), 0644); err != nil {
		fmt.Println("Could not write voucher to file, voucher hex string below:")
		fmt.Println(hex.EncodeToString(voucher))
	}
}
// VerifyState downloads the sequencer's current transcript and runs the
// full pairing-based verification over it (see verifyState). Panics on
// any transport, decode, or verification failure.
func VerifyState() {
	csjRes, err := http.DefaultClient.Post(HOST+"current_state", "application/json", bytes.NewBufferString("{}"))
	if err != nil {
		panic(err)
	}
	defer csjRes.Body.Close()

	csjBytes, err := io.ReadAll(csjRes.Body)
	if err != nil {
		panic(err)
	}
	// Bug fix: the original skipped the status check (unlike Bootstrap),
	// so an error page would be fed to the JSON decoder.
	if csjRes.StatusCode != 200 {
		fmt.Printf("Status code %d given by sequencer: \n", csjRes.StatusCode)
		panic(string(csjBytes))
	}

	currentStateJson := &CeremonyState{}
	if err := json.Unmarshal(csjBytes, currentStateJson); err != nil {
		// message is not conformant, we are in validating phase
		panic(err)
	}

	verifyState(currentStateJson)
}
// CheckVoucherInclusion reads the hex voucher private key at path,
// derives its public key, and scans the sequencer's current transcript
// for it. Exits 0 with the index when found; panics otherwise.
func CheckVoucherInclusion(path string) {
	csjRes, err := http.DefaultClient.Post(HOST+"current_state", "application/json", bytes.NewBufferString("{}"))
	if err != nil {
		panic(err)
	}
	defer csjRes.Body.Close()

	csjBytes, err := io.ReadAll(csjRes.Body)
	if err != nil {
		panic(err)
	}
	// Bug fix: the original skipped the status check, feeding error
	// pages into the JSON decoder.
	if csjRes.StatusCode != 200 {
		fmt.Printf("Status code %d given by sequencer: \n", csjRes.StatusCode)
		panic(string(csjBytes))
	}

	currentStateJson := &CeremonyState{}
	if err := json.Unmarshal(csjBytes, currentStateJson); err != nil {
		// message is not conformant, we are in validating phase
		panic(err)
	}

	voucherHex, err := os.ReadFile(path)
	if err != nil {
		panic(err)
	}
	// Bug fix: tolerate a trailing newline in the voucher file — the raw
	// bytes would otherwise fail hex decoding.
	decodedVoucher, err := hex.DecodeString(strings.TrimSpace(string(voucherHex)))
	if err != nil {
		panic(err)
	}

	privKey := ed448.PrivateKey(decodedVoucher)
	verifyPubKey := "0x" + hex.EncodeToString(privKey.Public().(ed448.PublicKey))
	for i, v := range currentStateJson.VoucherPubKeys {
		if v == verifyPubKey {
			fmt.Printf("Voucher pubkey found at index %d\n", i)
			os.Exit(0)
		}
	}
	panic(errors.New("voucher not found"))
}
// verifyState checks the full transcript in currentState:
//  1. every adjacent pair of witness running products is linked by the
//     corresponding potPubKey via a pairing check,
//  2. the first/last running products equal the tau^0 / tau^1 G1 powers,
//  3. the 65536 G1 powers are coherent (each is tau times the previous),
//  4. the G2 powers are coherent against the first G1 power of tau.
// All checks run under one errgroup; any failure panics at the end.
func verifyState(currentState *CeremonyState) {
	wg := &errgroup.Group{}
	// Keep this limit low: each pairing check is a very CPU-intensive
	// operation.
	wg.SetLimit(8)
	fmt.Println("Checking running products of witnesses...")
	// Check the pairings linking each contribution to the next.
	for j := 0; j < len(currentState.Witness.RunningProducts)-1; j++ {
		j := j // capture per-iteration value for the closure (pre-1.22 Go)
		wg.Go(func() error {
			fmt.Printf("Checking witness at %d\n", j)
			currRunningProductHex := strings.TrimPrefix(currentState.Witness.RunningProducts[j], "0x")
			currRunningProductBytes, err := hex.DecodeString(currRunningProductHex)
			if err != nil {
				return fmt.Errorf("could not decode G1 at %d", j)
			}
			currRunningProduct := bls48581.ECP_fromBytes(currRunningProductBytes)
			if currRunningProduct == nil {
				return fmt.Errorf("could not convert G1 at %d", j)
			}
			nextRunningProductHex := strings.TrimPrefix(currentState.Witness.RunningProducts[j+1], "0x")
			nextRunningProductBytes, err := hex.DecodeString(nextRunningProductHex)
			if err != nil {
				return fmt.Errorf("could not decode next G1 at %d", j)
			}
			nextRunningProduct := bls48581.ECP_fromBytes(nextRunningProductBytes)
			if nextRunningProduct == nil {
				return fmt.Errorf("could not convert next G1 at %d", j)
			}
			potPubKeyHex := strings.TrimPrefix(currentState.Witness.PotPubKeys[j+1], "0x")
			potPubKeyBytes, err := hex.DecodeString(potPubKeyHex)
			if err != nil {
				return fmt.Errorf("could not decode POT pubkey at %d", j)
			}
			potPubKey := bls48581.ECP8_fromBytes(potPubKeyBytes)
			if potPubKey == nil {
				return fmt.Errorf("could not convert POT pubkey at %d", j)
			}
			prevPotPubKeyHex := strings.TrimPrefix(currentState.Witness.PotPubKeys[j], "0x")
			prevPotPubKeyBytes, err := hex.DecodeString(prevPotPubKeyHex)
			if err != nil {
				return fmt.Errorf("could not decode POT pubkey at %d", j)
			}
			prevPotPubKey := bls48581.ECP8_fromBytes(prevPotPubKeyBytes)
			if prevPotPubKey == nil {
				return fmt.Errorf("could not convert POT pubkey at %d", j)
			}
			// e(potPubKey_{j+1}, product_j) must equal
			// e(potPubKey_j, product_{j+1}).
			if !pairCheck(potPubKey, currRunningProduct, prevPotPubKey, nextRunningProduct) {
				return fmt.Errorf("pairing check failed")
			}
			return nil
		})
	}
	fmt.Println("Checking latest witness parity...")
	// Check that the last running product is equal to G1 first power.
	lastRunningProductIdx := len(currentState.Witness.RunningProducts) - 1
	lastRunningProduct := currentState.Witness.RunningProducts[lastRunningProductIdx]
	if lastRunningProduct != currentState.PowersOfTau.G1Affines[1] {
		panic("mismatched running products for G1")
	}
	// Check that the first running product is the tau^0 power.
	firstRunningProduct := currentState.Witness.RunningProducts[0]
	if firstRunningProduct != currentState.PowersOfTau.G1Affines[0] {
		panic("mismatched first product for G1")
	}
	fmt.Println("Checking coherency of G1 powers...")
	// Check coherency of powers: e(tau*G2, tau^j*G1) == e(G2, tau^{j+1}*G1).
	for j := 0; j < 65535; j++ {
		j := j
		wg.Go(func() error {
			fmt.Printf("Checking coherency of G1 at %d\n", j)
			baseTauG2Hex := strings.TrimPrefix(currentState.PowersOfTau.G2Affines[1], "0x")
			baseTauG2Bytes, err := hex.DecodeString(baseTauG2Hex)
			if err != nil {
				return fmt.Errorf("failed to decode for G2 at %d", j)
			}
			baseTauG2 := bls48581.ECP8_fromBytes(baseTauG2Bytes)
			if baseTauG2 == nil {
				return fmt.Errorf("failed to convert for G2 at %d", j)
			}
			currG1Hex := strings.TrimPrefix(currentState.PowersOfTau.G1Affines[j], "0x")
			currG1Bytes, err := hex.DecodeString(currG1Hex)
			if err != nil {
				return fmt.Errorf("failed to decode for G1 at %d", j)
			}
			currG1 := bls48581.ECP_fromBytes(currG1Bytes)
			if currG1 == nil {
				return fmt.Errorf("failed to convert for G1 at %d", j)
			}
			nextG1Hex := strings.TrimPrefix(currentState.PowersOfTau.G1Affines[j+1], "0x")
			nextG1Bytes, err := hex.DecodeString(nextG1Hex)
			if err != nil {
				return fmt.Errorf("failed to decode for G1 at %d", j+1)
			}
			nextG1 := bls48581.ECP_fromBytes(nextG1Bytes)
			if nextG1 == nil {
				return fmt.Errorf("failed to convert for G1 at %d", j+1)
			}
			if !pairCheck(baseTauG2, currG1, bls48581.ECP8_generator(), nextG1) {
				return fmt.Errorf("pairing check failed")
			}
			return nil
		})
	}
	fmt.Println("Checking coherency of G2 powers...")
	// Check G2 powers are coherent: e(tau^j*G2, tau*G1) == e(tau^{j+1}*G2, G1).
	for j := 0; j < 256; j++ {
		j := j
		wg.Go(func() error {
			fmt.Printf("Checking coherency of G2 at %d\n", j)
			baseTauG1Hex := strings.TrimPrefix(currentState.PowersOfTau.G1Affines[1], "0x")
			baseTauG1Bytes, err := hex.DecodeString(baseTauG1Hex)
			if err != nil {
				return fmt.Errorf("failed to decode for G1 at %d", j)
			}
			baseTauG1 := bls48581.ECP_fromBytes(baseTauG1Bytes)
			if baseTauG1 == nil {
				return fmt.Errorf("failed to convert for G1 at %d", j)
			}
			currG2Hex := strings.TrimPrefix(currentState.PowersOfTau.G2Affines[j], "0x")
			currG2Bytes, err := hex.DecodeString(currG2Hex)
			if err != nil {
				return fmt.Errorf("failed to decode for G2 at %d", j)
			}
			currG2 := bls48581.ECP8_fromBytes(currG2Bytes)
			if currG2 == nil {
				return fmt.Errorf("failed to convert for G1 at %d", j)
			}
			nextG2Hex := strings.TrimPrefix(currentState.PowersOfTau.G2Affines[j+1], "0x")
			nextG2Bytes, err := hex.DecodeString(nextG2Hex)
			if err != nil {
				return fmt.Errorf("failed to decode for G2 at %d", j+1)
			}
			nextG2 := bls48581.ECP8_fromBytes(nextG2Bytes)
			if nextG2 == nil {
				return fmt.Errorf("failed to convert for G2 at %d", j+1)
			}
			if !pairCheck(currG2, baseTauG1, nextG2, bls48581.ECP_generator()) {
				return fmt.Errorf("pairing check failed")
			}
			return nil
		})
	}
	if err := wg.Wait(); err != nil {
		panic(fmt.Errorf("error validating transcript: %w", err))
	}
	fmt.Println("Current state is valid Powers of Tau!")
}
// pairCheck verifies e(G21, G11) == e(G22, G12) by evaluating the double
// pairing e(G21, G11) * e(G22, -G12) and testing the result for unity.
// NOTE: G12 is negated in place, so the caller's point is mutated.
func pairCheck(G21 *bls48581.ECP8, G11 *bls48581.ECP, G22 *bls48581.ECP8, G12 *bls48581.ECP) bool {
	G12.Neg()
	v := bls48581.Ate2(G21, G11, G22, G12)
	v = bls48581.Fexp(v)
	if !v.Isunity() {
		fmt.Println("pairing check failed")
		return false
	}
	return true
}

View File

@ -1,796 +0,0 @@
/*
* Copyright (c) 2012-2020 MIRACL UK Ltd.
*
* This file is part of MIRACL Core
* (see https://github.com/miracl/core).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Kyber API */
package core
//import "fmt"
// Kyber ring parameters: polynomials have KY_DEGREE = 256 coefficients
// modulo the prime q = KY_PRIME = 3329.
const KY_LGN uint = 8
const KY_DEGREE int = (1 << KY_LGN);
const KY_PRIME int32 = 0xD01

const KY_ONE int32 = 0x549 // R mod Q
const KY_QINV int32 = 62209 // q^(-1) mod 2^16

// Byte sizes per security level. CPA secret/public keys hold k
// polynomials packed at 12 bits per coefficient (KY_DEGREE*3/2 bytes
// each); the CCA secret key appends the public key, its hash, and z.
const KYBER_SECRET_CPA_SIZE_512 int = (2*(KY_DEGREE*3)/2)
const KYBER_PUBLIC_SIZE_512 int = (32+2*(KY_DEGREE*3)/2)
const KYBER_CIPHERTEXT_SIZE_512 int = ((10*2+4)*KY_DEGREE/8)
const KYBER_SECRET_CCA_SIZE_512 int = (KYBER_SECRET_CPA_SIZE_512+KYBER_PUBLIC_SIZE_512+64)
const KYBER_SHARED_SECRET_512 int = 32

const KYBER_SECRET_CPA_SIZE_768 int = (3*(KY_DEGREE*3)/2)
const KYBER_PUBLIC_SIZE_768 int = (32+3*(KY_DEGREE*3)/2)
const KYBER_CIPHERTEXT_SIZE_768 int = ((10*3+4)*KY_DEGREE/8)
const KYBER_SECRET_CCA_SIZE_768 int = (KYBER_SECRET_CPA_SIZE_768+KYBER_PUBLIC_SIZE_768+64)
const KYBER_SHARED_SECRET_768 int = 32

const KYBER_SECRET_CPA_SIZE_1024 int = (4*(KY_DEGREE*3)/2)
const KYBER_PUBLIC_SIZE_1024 int = (32+4*(KY_DEGREE*3)/2)
const KYBER_CIPHERTEXT_SIZE_1024 int = ((11*4+5)*KY_DEGREE/8)
const KYBER_SECRET_CCA_SIZE_1024 int = (KYBER_SECRET_CPA_SIZE_1024+KYBER_PUBLIC_SIZE_1024+64)
const KYBER_SHARED_SECRET_1024 int = 32

// KY_MAXK is the largest module rank k across parameter sets.
const KY_MAXK = 4;

// parameters for each security level
// K,eta1,eta2,du,dv,shared secret
var PARAMS_512 = [6]int{2,3,2,10,4,32}
var PARAMS_768 = [6]int{3,2,2,10,4,32}
var PARAMS_1024 = [6]int{4,2,2,11,5,32}

/* Translated from public domain reference implementation code - taken from https://github.com/pq-crystals/kyber */

// ZETAS holds the NTT twiddle factors (powers of the 256th root of
// unity mod q, in Montgomery form, bit-reversed order).
var ZETAS = [256]int16{
	-1044, -758, -359, -1517, 1493, 1422, 287, 202,
	-171, 622, 1577, 182, 962, -1202, -1474, 1468,
	573, -1325, 264, 383, -829, 1458, -1602, -130,
	-681, 1017, 732, 608, -1542, 411, -205, -1571,
	1223, 652, -552, 1015, -1293, 1491, -282, -1544,
	516, -8, -320, -666, -1618, -1162, 126, 1469,
	-853, -90, -271, 830, 107, -1421, -247, -951,
	-398, 961, -1508, -725, 448, -1065, 677, -1275,
	-1103, 430, 555, 843, -1251, 871, 1550, 105,
	422, 587, 177, -235, -291, -460, 1574, 1653,
	-246, 778, 1159, -147, -777, 1483, -602, 1119,
	-1590, 644, -872, 349, 418, 329, -156, -75,
	817, 1097, 603, 610, 1322, -1285, -1465, 384,
	-1215, -136, 1218, -1335, -874, 220, -1187, -1659,
	-1185, -1530, -1278, 794, -1510, -854, -870, 478,
	-108, -308, 996, 991, 958, -1460, 1522, 1628}
// montgomery_reduce maps a to a*R^-1 mod q (R = 2^16), returning a value
// in (-q, q). Standard Montgomery reduction using KY_QINV = q^-1 mod 2^16.
func montgomery_reduce(a int32) int16 {
	t := int16(a*KY_QINV)
	t = int16((a - int32(t)*KY_PRIME) >> 16)
	return t
}

// barrett_reduce maps a to the representative of a mod q in
// {-(q-1)/2, ..., (q-1)/2}, via Barrett reduction with divisor 2^26.
func barrett_reduce(a int16) int16 {
	v := int16(((int32(1)<<26) + KY_PRIME/2)/KY_PRIME)
	vv := int32(v)
	aa := int32(a)
	t := int16((vv*aa + 0x2000000) >> 26);
	t *= int16(KY_PRIME)
	return int16(a - t);
}

// fqmul multiplies two field elements, result in Montgomery form.
func fqmul(a int16, b int16) int16 {
	return montgomery_reduce(int32(a)*int32(b));
}
// ntt transforms r (256 coefficients) into the number-theoretic-transform
// domain in place, using the ZETAS twiddle table. Cooley–Tukey
// butterflies over halving block lengths 128..2.
// Note: the loop variable `len` shadows the builtin; it is the butterfly
// half-length, not a slice length.
func ntt(r []int16) {
	var j int
	k := 1
	for len := 128; len >= 2; len >>= 1 {
		for start := 0; start < 256; start = j + len {
			zeta := ZETAS[k]; k += 1
			for j = start; j < start+len; j++ {
				t := fqmul(zeta, r[j+len])
				r[j+len] = r[j] - t
				r[j] = r[j] + t
			}
		}
	}
}
// invntt is the inverse NTT in place: Gentleman–Sande butterflies over
// doubling block lengths 2..128, walking ZETAS backwards, then a final
// scaling by f = mont^2/128 to undo the transform's growth.
func invntt(r []int16) {
	var j int
	f := int16(1441) // mont^2/128
	k := 127
	for len := 2; len <= 128; len <<= 1 {
		for start := 0; start < 256; start = j + len {
			zeta := ZETAS[k]; k -= 1
			for j = start; j < start+len; j++ {
				t := r[j]
				r[j] = barrett_reduce(t + r[j+len])
				r[j+len] = (r[j+len] - t)
				r[j+len] = fqmul(zeta, r[j+len])
			}
		}
	}
	for j := 0; j < 256; j++ {
		r[j] = fqmul(r[j], f)
	}
}
// basemul multiplies the degree-1 polynomial pairs (a[i] + a[j]X) and
// (b[i] + b[j]X) modulo X^2 - zeta, writing into r[i], r[j] with
// j = index+1. Used as the pointwise step of NTT-domain multiplication.
func basemul(index int, r []int16, a []int16, b []int16, zeta int16) {
	i := index
	j := index + 1
	r[i] = fqmul(a[j], b[j])
	r[i] = fqmul(r[i], zeta)
	r[i] += fqmul(a[i], b[i])
	r[j] = fqmul(a[i], b[j])
	r[j] += fqmul(a[j], b[i])
}
// poly_reduce applies Barrett reduction to every coefficient.
func poly_reduce(r []int16) {
	for i := 0; i < KY_DEGREE; i++ {
		r[i] = barrett_reduce(r[i])
	}
}

// poly_ntt transforms r to the NTT domain and reduces coefficients.
func poly_ntt(r []int16) {
	ntt(r)
	poly_reduce(r)
}

// poly_invntt transforms r back from the NTT domain.
func poly_invntt(r []int16) {
	invntt(r)
}

// poly_mul multiplies a and b pointwise in the NTT domain into r.
// Note r must be distinct from a and b.
func poly_mul(r []int16, a []int16, b []int16) {
	for i := 0; i < KY_DEGREE/4; i++ {
		basemul(4*i, r, a, b, ZETAS[64+i])
		basemul(4*i+2, r, a, b, -ZETAS[64+i])
	}
}

// poly_tomont converts every coefficient to Montgomery form
// (multiplies by R mod q).
func poly_tomont(r []int16) {
	f := int32(KY_ONE);
	for i := 0; i < KY_DEGREE; i++ {
		r[i] = montgomery_reduce(int32(r[i])*f)
	}
}

/* End of public domain reference code use */

// copy polynomial
func poly_copy(p1 []int16, p2 []int16) {
	for i := 0; i < KY_DEGREE; i++ {
		p1[i] = p2[i]
	}
}

// zero polynomial
func poly_zero(p1 []int16) {
	for i := 0; i < KY_DEGREE; i++ {
		p1[i] = 0
	}
}

// add polynomials: p1 = p2 + p3 (no reduction here).
func poly_add(p1 []int16, p2 []int16, p3 []int16) {
	for i := 0; i < KY_DEGREE; i++ {
		p1[i] = (p2[i] + p3[i])
	}
}

// subtract polynomials: p1 = p2 - p3 (no reduction here).
func poly_sub(p1 []int16, p2 []int16, p3 []int16) {
	for i := 0; i < KY_DEGREE; i++ {
		p1[i] = (p2[i] - p3[i])
	}
}
// expandAij generates matrix entry A[i][j] deterministically from the
// 32-byte seed rho via SHAKE128, rejection-sampling 12-bit values < q.
// Beware: after seeding, the parameters i and j are reused as the buffer
// read cursor and the count of accepted coefficients.
func expandAij(rho []byte, Aij []int16, i int, j int) {
	sh := NewSHA3(SHA3_SHAKE128)
	var buff [640]byte // should be plenty (?)
	for m := 0; m < 32; m++ {
		sh.Process(rho[m])
	}
	sh.Process(byte(j&0xff))
	sh.Process(byte(i&0xff))
	sh.Shake(buff[:], 640)
	// i: read cursor into buff; j: accepted-coefficient count.
	i = 0
	j = 0
	for j < KY_DEGREE {
		// Unpack two 12-bit candidates from each 3-byte group.
		d1 := int16(buff[i])+256*int16(buff[i+1]&0x0F);
		d2 := int16(buff[i+1])/16+16*int16(buff[i+2]);
		if (d1 < int16(KY_PRIME)) {
			Aij[j] = d1; j += 1
		}
		if (d2 < int16(KY_PRIME) && j < KY_DEGREE) {
			Aij[j] = d2; j += 1
		}
		i += 3
	}
}
// getbit returns bit n of the packed little-endian bit stream b
// (bit 0 is the least significant bit of b[0]).
func getbit(b []byte, n int) int {
	return int(b[n/8]>>(n%8)) & 1
}
// cbd samples a polynomial f from the centered binomial distribution
// with parameter eta: each coefficient is the difference of two eta-bit
// Hamming weights drawn from the byte stream bts.
func cbd(bts []byte, eta int, f []int16) {
	for i := 0; i < KY_DEGREE; i++ {
		base := 2 * i * eta
		diff := 0
		for j := 0; j < eta; j++ {
			diff += getbit(bts, base+j) - getbit(bts, base+eta+j)
		}
		f[i] = int16(diff)
	}
}
// nextword extracts the next ab-bit word from the dense byte stream t.
// position is a two-element cursor: position[0] is the byte index,
// position[1] the bit offset within that byte; both are advanced.
func nextword(ab int, t []byte, position []int) int16 {
	ptr := position[0] // index in array
	bts := position[1] // bit index in byte
	r := int16(t[ptr] >> bts)
	mask := int16((1 << ab) - 1)
	i := 0
	gotbits := 8 - bts // bits left in current byte
	// Pull in whole following bytes until ab bits are available.
	for gotbits < ab {
		i++
		w := int16(t[ptr+i])
		r |= w << gotbits
		gotbits += 8
	}
	// Advance the cursor by ab bits.
	bts += ab
	for bts >= 8 {
		bts -= 8
		ptr++
	}
	w := int16(r & mask)
	position[0] = ptr
	position[1] = bts
	return w
}
// nextbyte16 emits the next byte of the packed encoding of t, whose
// words each carry ab active bits. position is a (word index, bit
// offset) cursor advanced by 8 bits per call. Negative coefficients are
// lifted into [0, q) by the conditional add of KY_PRIME.
// NOTE(review): the word index is taken mod 256 (k := ptr % 256), so the
// cursor wraps per-polynomial while ptr keeps counting — confirm callers
// rely on this when packing multi-polynomial vectors.
func nextbyte16(ab int, t []int16, position []int) byte {
	ptr := position[0] // index in array
	bts := position[1] // bit index in byte
	left := ab - bts // number of bits left in this word
	i := 0
	k := ptr % 256
	w := t[k]; w += (w >> 15) & int16(KY_PRIME) // lift negatives into [0, q)
	r := int16(w >> bts);
	// Accumulate following words until 8 bits are ready.
	for left < 8 {
		i++
		w = t[k+i]; w += (w >> 15) & int16(KY_PRIME)
		r |= w << left
		left += ab
	}
	// Advance the cursor by the 8 bits consumed.
	bts += 8
	for bts >= ab {
		bts -= ab;
		ptr++;
	}
	position[0] = ptr
	position[1] = bts
	return byte(r & 0xff);
}
// encode packs one polynomial t at L bits per coefficient into pack at
// chunk index pptr; pos is the running (word, bit) cursor shared across
// successive calls.
func encode(t []int16, pos []int, L int, pack []byte, pptr int) {
	chunk := (KY_DEGREE * L) / 8 // compressed length in bytes
	base := pptr * chunk
	for n := 0; n < chunk; n++ {
		pack[base+n] = nextbyte16(L, t, pos)
	}
}
// chk_encode re-packs t exactly as encode would and returns the OR of
// all byte differences against pack — zero means an exact match. Runs in
// constant time with respect to where a mismatch occurs.
func chk_encode(t []int16, pos []int, L int, pack []byte, pptr int) byte {
	chunk := (KY_DEGREE * L) / 8
	base := pptr * chunk
	var diff byte
	for n := 0; n < chunk; n++ {
		diff |= nextbyte16(L, t, pos) ^ pack[base+n]
	}
	return diff
}
// decode unpacks KY_DEGREE coefficients of L bits each from pack into t;
// pos is the running (byte, bit) cursor within pack.
func decode(pack []byte, L int, t []int16, pos []int) {
	for k := 0; k < KY_DEGREE; k++ {
		t[k] = nextword(L, pack, pos)
	}
}
// compress reduces each coefficient to d bits in place:
// t[i] = round(2^d * t[i] / q) mod 2^d, after first lifting negative
// representatives into [0, q).
func compress(t []int16, d int) {
	twod := int32(1 << d)
	for i := 0; i < KY_DEGREE; i++ {
		t[i] += (t[i] >> 15) & int16(KY_PRIME) // add q if negative
		t[i] = int16(((twod*int32(t[i]) + KY_PRIME/2) / KY_PRIME) & (twod - 1))
	}
}
// decompress expands d-bit compressed coefficients back to mod-q values
// in place: t[i] = round(q * t[i] / 2^d).
func decompress(t []int16, d int) {
	half := int32(1) << (d - 1) // rounding term
	for k := 0; k < KY_DEGREE; k++ {
		t[k] = int16((KY_PRIME*int32(t[k]) + half) >> d)
	}
}
// cpa_keypair derives an IND-CPA keypair from the 32-byte seed tau:
// expands tau into (rho, sigma), samples secret s and error e from the
// CBD, computes p = A*s + e in the NTT domain, and packs s into sk and
// (p, rho) into pk.
func cpa_keypair(params [6]int, tau []byte, sk []byte, pk []byte) {
	sh := NewSHA3(SHA3_HASH512)
	var rho [32]byte
	var sigma [33]byte
	var buff [256]byte
	ck := params[0] // module rank k
	var r [KY_DEGREE]int16
	var w [KY_DEGREE]int16
	var Aij [KY_DEGREE]int16
	var s = make([][KY_DEGREE]int16, ck)
	var e = make([][KY_DEGREE]int16, ck)
	var p = make([][KY_DEGREE]int16, ck)
	eta1 := params[1]
	public_key_size := 32 + ck*(KY_DEGREE*3)/2
	// secret_cpa_key_size:=ck*(KY_DEGREE*3)/2
	// (rho, sigma) = SHA3-512(tau); sigma[32] is the domain-separation
	// nonce N, incremented per sampled polynomial.
	for i := 0; i < 32; i++ {
		sh.Process(tau[i])
	}
	bf := sh.Hash();
	for i := 0; i < 32; i++ {
		rho[i] = bf[i]
		sigma[i] = bf[i+32]
	}
	sigma[32] = 0 // N
	// create s
	for i := 0; i < ck; i++ {
		sh = NewSHA3(SHA3_SHAKE256)
		for j := 0; j < 33; j++ {
			sh.Process(sigma[j])
		}
		sh.Shake(buff[:], 64*eta1);
		cbd(buff[:], eta1, s[i][:])
		sigma[32] += 1
	}
	// create e
	for i := 0; i < ck; i++ {
		sh = NewSHA3(SHA3_SHAKE256)
		for j := 0; j < 33; j++ {
			sh.Process(sigma[j])
		}
		sh.Shake(buff[:], 64*eta1)
		cbd(buff[:], eta1, e[i][:])
		sigma[32] += 1
	}
	for k := 0; k < ck; k++ {
		poly_ntt(s[k][:])
		poly_ntt(e[k][:])
	}
	// p[i] = row i of A times s, plus e[i], all in the NTT domain.
	for i := 0; i < ck; i++ {
		expandAij(rho[:], Aij[:], i, 0)
		poly_mul(r[:], Aij[:], s[0][:])
		for j := 1; j < ck; j++ {
			expandAij(rho[:], Aij[:], i, j)
			poly_mul(w[:], s[j][:], Aij[:])
			poly_add(r[:], r[:], w[:])
		}
		poly_reduce(r[:])
		poly_tomont(r[:])
		poly_add(p[i][:], r[:], e[i][:])
		poly_reduce(p[i][:])
	}
	// Pack s into sk and p into pk at 12 bits per coefficient, then
	// append rho to pk.
	var pos [2]int
	pos[0] = 0; pos[1] = 0
	for i := 0; i < ck; i++ {
		encode(s[i][:], pos[:], 12, sk, i)
	}
	pos[0] = 0; pos[1] = 0
	for i := 0; i < ck; i++ {
		encode(p[i][:], pos[:], 12, pk, i)
	}
	for i := 0; i < 32; i++ {
		pk[public_key_size-32+i] = rho[i]
	}
}
// cca_keypair builds a CCA (FO-transformed) keypair from 64 random
// bytes: the CPA pair from the first 32, then appends to sk the public
// key, H(pk), and the implicit-rejection value z (the last 32 bytes).
func cca_keypair(params [6]int, randbytes64 []byte, sk []byte, pk []byte) {
	sh := NewSHA3(SHA3_HASH256)
	sks := (params[0]*(KY_DEGREE*3)/2)
	pks := (32 + params[0]*(KY_DEGREE*3)/2)
	cpa_keypair(params, randbytes64[0:32], sk, pk)
	// sk = cpa_sk || pk || H(pk) || z
	for i := 0; i < pks; i++ {
		sk[sks+i] = pk[i]
	}
	for i := 0; i < pks; i++ {
		sh.Process(pk[i])
	}
	h := sh.Hash();
	for i := 0; i < 32; i++ {
		sk[sks+pks+i] = h[i]
	}
	for i := 0; i < 32; i++ {
		sk[sks+pks+32+i] = randbytes64[32+i]
	}
}
// cpa_base_encrypt performs the core CPA encryption of the 32-byte
// message ss under pk with randomness coins, producing the uncompressed
// ciphertext components u (vector) and v (polynomial), then compresses
// them to du/dv bits in place.
func cpa_base_encrypt(params [6]int, coins []byte, pk []byte, ss []byte, u [][256]int16, v []int16) {
	var rho [32]byte
	var sigma [33]byte
	var buff [256]byte
	ck := params[0]
	var r [KY_DEGREE]int16
	var w [KY_DEGREE]int16
	var Aij [KY_DEGREE]int16
	var q = make([][KY_DEGREE]int16, ck)
	var p = make([][KY_DEGREE]int16, ck)
	eta1 := params[1]
	eta2 := params[2]
	du := params[3]
	dv := params[4]
	public_key_size := 32 + ck*(KY_DEGREE*3)/2
	for i := 0; i < 32; i++ {
		sigma[i] = coins[i] //i+6 //RAND_byte(RNG);
	}
	sigma[32] = 0 // domain-separation nonce N
	for i := 0; i < 32; i++ {
		rho[i] = pk[public_key_size-32+i]
	}
	// create q (the ephemeral secret vector, eta1)
	for i := 0; i < ck; i++ {
		sh := NewSHA3(SHA3_SHAKE256)
		for j := 0; j < 33; j++ {
			sh.Process(sigma[j])
		}
		sh.Shake(buff[:], 64*eta1)
		cbd(buff[:], eta1, q[i][:])
		sigma[32] += 1
	}
	// create e1
	for i := 0; i < ck; i++ {
		sh := NewSHA3(SHA3_SHAKE256)
		for j := 0; j < 33; j++ {
			sh.Process(sigma[j])
		}
		sh.Shake(buff[:], 64*eta2);
		// NOTE(review): buff holds 64*eta2 bytes but cbd samples with
		// eta1; for the 512 parameter set eta1 != eta2, and the Kyber
		// reference uses eta2 for e1 — confirm against upstream MIRACL.
		cbd(buff[:], eta1, u[i][:]) // e1
		sigma[32] += 1
	}
	for i := 0; i < ck; i++ {
		poly_ntt(q[i][:])
	}
	// u = A^T * q + e1 (A indexed transposed versus keygen).
	for i := 0; i < ck; i++ {
		expandAij(rho[:], Aij[:], 0, i)
		poly_mul(r[:], Aij[:], q[0][:])
		for j := 1; j < ck; j++ {
			expandAij(rho[:], Aij[:], j, i)
			poly_mul(w[:], q[j][:], Aij[:])
			poly_add(r[:], r[:], w[:])
		}
		poly_reduce(r[:]);
		poly_invntt(r[:]);
		poly_add(u[i][:], u[i][:], r[:]);
		poly_reduce(u[i][:]);
	}
	// v = p . q + e2 + decompress(ss, 1)
	var pos [2]int
	pos[0] = 0; pos[1] = 0
	for i := 0; i < ck; i++ {
		decode(pk, 12, p[i][:], pos[:])
	}
	poly_mul(v[:], p[0][:], q[0][:])
	for i := 1; i < ck; i++ {
		poly_mul(r[:], p[i][:], q[i][:])
		poly_add(v[:], v[:], r[:])
	}
	poly_invntt(v[:])
	// create e2
	sh := NewSHA3(SHA3_SHAKE256)
	for j := 0; j < 33; j++ {
		sh.Process(sigma[j])
	}
	sh.Shake(buff[:], 64*eta2)
	// NOTE(review): same eta1-vs-eta2 question as e1 above.
	cbd(buff[:], eta1, w[:]) // e2
	poly_add(v[:], v[:], w[:])
	pos[0] = 0; pos[1] = 0
	decode(ss, 1, r[:], pos[:])
	decompress(r[:], 1)
	poly_add(v[:], v[:], r[:])
	poly_reduce(v[:])
	for i := 0; i < ck; i++ {
		compress(u[i][:], du)
	}
	compress(v[:], dv)
}
// cpa_encrypt encrypts the 32-byte shared-secret encoding ss under pk
// with randomness coins, packing the compressed (u, v) into ct:
// du bits per u coefficient, dv bits per v coefficient.
func cpa_encrypt(params [6]int, coins []byte, pk []byte, ss []byte, ct []byte) {
	ck := params[0]
	var v [KY_DEGREE]int16
	var u = make([][KY_DEGREE]int16, ck)
	du := params[3]
	dv := params[4]
	ciphertext_size := (du*ck + dv) * KY_DEGREE / 8
	cpa_base_encrypt(params, coins, pk, ss, u, v[:])
	var pos [2]int
	pos[0] = 0; pos[1] = 0
	for i := 0; i < ck; i++ {
		encode(u[i][:], pos[:], du, ct, i)
	}
	// v occupies the trailing dv*KY_DEGREE/8 bytes of ct.
	encode(v[:], pos[:], dv, ct[ciphertext_size-(dv*KY_DEGREE/8):ciphertext_size], 0)
}
// cpa_check_encrypt re-encrypts (coins, ss) under pk and compares the
// result against ct without branching on position: returns 0 on an exact
// match, 0xff otherwise. Used for the FO re-encryption check in
// cca_decrypt.
func cpa_check_encrypt(params [6]int, coins []byte, pk []byte, ss []byte, ct []byte) byte {
	ck := params[0]
	var v [KY_DEGREE]int16
	var u = make([][KY_DEGREE]int16, ck)
	du := params[3]
	dv := params[4]
	ciphertext_size := (du*ck + dv) * KY_DEGREE / 8
	d1 := byte(0)
	cpa_base_encrypt(params, coins, pk, ss, u, v[:]);
	var pos [2]int
	pos[0] = 0; pos[1] = 0
	for i := 0; i < ck; i++ {
		d1 |= chk_encode(u[i][:], pos[:], du, ct, i)
	}
	d2 := chk_encode(v[:], pos[:], dv, ct[ciphertext_size-(dv*KY_DEGREE/8):ciphertext_size], 0);
	if (d1 | d2) == 0 {
		return 0
	} else {
		return byte(0xff)
	}
}
// cca_encrypt is the CCA (FO-transformed) encapsulation: from 32 random
// bytes it derives m = H(random), (Kb, coins) = G(m || H(pk)), encrypts
// m under pk with those coins into ct, and outputs the shared secret
// ss = KDF(Kb || H(ct)).
func cca_encrypt(params [6]int, randbytes32 []byte, pk []byte, ss []byte, ct []byte) {
	var coins [32]byte
	ck := params[0]
	du := params[3]
	dv := params[4]
	shared_secret_size := params[5]
	public_key_size := 32 + ck*(KY_DEGREE*3)/2
	ciphertext_size := (du*ck + dv) * KY_DEGREE / 8
	// m = H(randbytes32): hash the entropy rather than using it raw.
	sh := NewSHA3(SHA3_HASH256)
	for i := 0; i < 32; i++ {
		sh.Process(randbytes32[i])
	}
	hm := sh.Hash();
	// h = H(pk)
	sh = NewSHA3(SHA3_HASH256)
	for i := 0; i < public_key_size; i++ {
		sh.Process(pk[i])
	}
	h := sh.Hash()
	// g = G(m || h) = Kb (first 32) || coins (last 32)
	sh = NewSHA3(SHA3_HASH512);
	for i := 0; i < 32; i++ {
		sh.Process(hm[i])
	}
	for i := 0; i < 32; i++ {
		sh.Process(h[i])
	}
	g := sh.Hash()
	for i := 0; i < 32; i++ {
		coins[i] = g[i+32]
	}
	cpa_encrypt(params, coins[:], pk, hm, ct)
	// ss = SHAKE256(Kb || H(ct))
	sh = NewSHA3(SHA3_HASH256)
	for i := 0; i < ciphertext_size; i++ {
		sh.Process(ct[i])
	}
	h = sh.Hash();
	sh = NewSHA3(SHA3_SHAKE256)
	for i := 0; i < 32; i++ {
		sh.Process(g[i])
	}
	for i := 0; i < 32; i++ {
		sh.Process(h[i])
	}
	sh.Shake(ss[:], shared_secret_size)
}
// cpa_decrypt recovers the 32-byte message from ciphertext CT with the
// CPA secret key SK: decompresses (u, v), computes v - s.u, and
// compresses the result to 1 bit per coefficient into SS.
func cpa_decrypt(params [6]int, SK []byte, CT []byte, SS []byte) {
	ck := params[0]
	var w [KY_DEGREE]int16
	var v [KY_DEGREE]int16
	var r [KY_DEGREE]int16
	var u = make([][KY_DEGREE]int16, ck)
	var s = make([][KY_DEGREE]int16, ck)
	du := params[3]
	dv := params[4]
	//shared_secret_size:=params[5]
	// Unpack u (du bits each) then v (dv bits) from CT.
	var pos [2]int
	pos[0] = 0; pos[1] = 0
	for i := 0; i < ck; i++ {
		decode(CT, du, u[i][:], pos[:])
	}
	decode(CT, dv, v[:], pos[:]);
	for i := 0; i < ck; i++ {
		decompress(u[i][:], du)
	}
	decompress(v[:], dv)
	// Unpack the secret vector s (12 bits per coefficient).
	pos[0] = 0; pos[1] = 0
	for i := 0; i < ck; i++ {
		decode(SK, 12, s[i][:], pos[:])
	}
	// w = s . u in the NTT domain.
	poly_ntt(u[0][:]);
	poly_mul(w[:], u[0][:], s[0][:]);
	for i := 1; i < ck; i++ {
		poly_ntt(u[i][:])
		poly_mul(r[:], u[i][:], s[i][:])
		poly_add(w[:], w[:], r[:])
	}
	poly_reduce(w[:]);
	poly_invntt(w[:]);
	// m = compress(v - w, 1)
	poly_sub(v[:], v[:], w[:]);
	compress(v[:], 1);
	pos[0] = 0; pos[1] = 0;
	encode(v[:], pos[:], 1, SS, 0);
}
// cca_decrypt is the CCA decapsulation with implicit rejection: decrypt
// m, rederive (Kb, coins) = G(m || H(pk)), re-encrypt and compare
// against CT in constant time; on mismatch substitute the stored random
// z for Kb so invalid ciphertexts yield a pseudorandom shared secret
// rather than an error. SS = SHAKE256(Kb-or-z || H(CT)).
func cca_decrypt(params [6]int, SK []byte, CT []byte, SS []byte) {
	ck := params[0]
	du := params[3]
	dv := params[4]
	secret_cpa_key_size := ck * (KY_DEGREE * 3) / 2
	public_key_size := 32 + ck*(KY_DEGREE*3)/2
	shared_secret_size := params[5]
	ciphertext_size := (du*ck + dv) * KY_DEGREE / 8
	var h [32]byte
	var z [32]byte
	var m [32]byte
	var coins [32]byte
	// SK layout (see cca_keypair): cpa_sk || pk || H(pk) || z.
	PK := SK[secret_cpa_key_size : secret_cpa_key_size+public_key_size]
	for i := 0; i < 32; i++ {
		h[i] = SK[secret_cpa_key_size+public_key_size+i]
	}
	for i := 0; i < 32; i++ {
		z[i] = SK[secret_cpa_key_size+public_key_size+32+i]
	}
	cpa_decrypt(params, SK, CT, m[:])
	// g = G(m || H(pk)) = Kb || coins
	sh := NewSHA3(SHA3_HASH512)
	for i := 0; i < 32; i++ {
		sh.Process(m[i])
	}
	for i := 0; i < 32; i++ {
		sh.Process(h[i])
	}
	g := sh.Hash()
	for i := 0; i < 32; i++ {
		coins[i] = g[i+32]
	}
	// mask is 0x00 on a valid ciphertext, 0xff otherwise.
	mask := cpa_check_encrypt(params, coins[:], PK, m[:], CT)
	for i := 0; i < 32; i++ {
		g[i] ^= (g[i] ^ z[i]) & mask // substitute z for Kb on failure
	}
	sh = NewSHA3(SHA3_HASH256)
	for i := 0; i < ciphertext_size; i++ {
		sh.Process(CT[i])
	}
	hh := sh.Hash()
	sh = NewSHA3(SHA3_SHAKE256);
	for i := 0; i < 32; i++ {
		sh.Process(g[i])
	}
	for i := 0; i < 32; i++ {
		sh.Process(hh[i])
	}
	sh.Shake(SS, shared_secret_size)
}
// Public entry points: thin wrappers binding the generic CCA routines to
// the three standard parameter sets. r64 is 64 bytes of entropy for
// keygen, r32 is 32 bytes for encapsulation.

// KYBER_keypair512 generates a Kyber-512 keypair from r64.
func KYBER_keypair512(r64 []byte, SK []byte, PK []byte) {
	cca_keypair(PARAMS_512, r64, SK, PK)
}

// KYBER_encrypt512 encapsulates a shared secret SS under PK into CT.
func KYBER_encrypt512(r32 []byte, PK []byte, SS []byte, CT []byte) {
	cca_encrypt(PARAMS_512, r32, PK, SS, CT)
}

// KYBER_decrypt512 decapsulates CT with SK into SS.
func KYBER_decrypt512(SK []byte, CT []byte, SS []byte) {
	cca_decrypt(PARAMS_512, SK, CT, SS)
}

// KYBER_keypair768 generates a Kyber-768 keypair from r64.
func KYBER_keypair768(r64 []byte, SK []byte, PK []byte) {
	cca_keypair(PARAMS_768, r64, SK, PK)
}

// KYBER_encrypt768 encapsulates a shared secret SS under PK into CT.
func KYBER_encrypt768(r32 []byte, PK []byte, SS []byte, CT []byte) {
	cca_encrypt(PARAMS_768, r32, PK, SS, CT)
}

// KYBER_decrypt768 decapsulates CT with SK into SS.
func KYBER_decrypt768(SK []byte, CT []byte, SS []byte) {
	cca_decrypt(PARAMS_768, SK, CT, SS)
}

// KYBER_keypair1024 generates a Kyber-1024 keypair from r64.
func KYBER_keypair1024(r64 []byte, SK []byte, PK []byte) {
	cca_keypair(PARAMS_1024, r64, SK, PK)
}

// KYBER_encrypt1024 encapsulates a shared secret SS under PK into CT.
func KYBER_encrypt1024(r32 []byte, PK []byte, SS []byte, CT []byte) {
	cca_encrypt(PARAMS_1024, r32, PK, SS, CT)
}

// KYBER_decrypt1024 decapsulates CT with SK into SS.
func KYBER_decrypt1024(SK []byte, CT []byte, SS []byte) {
	cca_decrypt(PARAMS_1024, SK, CT, SS)
}

View File

@ -1,540 +0,0 @@
package feldman
import (
"bytes"
"crypto/rand"
"crypto/sha256"
"fmt"
"source.quilibrium.com/quilibrium/ceremonyclient/ec/bls48581"
)
// FeldmanECP implements Feldman-style verifiable secret sharing over the
// bls48581 ECP group.
// This will not be used in the initial ceremony, but will be used in a end of ceremony event
type FeldmanECP struct {
	threshold int // minimum number of shares required to reconstruct
	total     int // total number of participants
	id        int // this party's 1-based participant index
	// Serialized polynomial evaluations destined for each counterparty, keyed by id.
	fragsForCounterparties map[int][]byte
	// Deserialized fragments received from counterparties, keyed by id.
	fragsFromCounterparties map[int]*bls48581.BIG
	zkpok  *bls48581.BIG // Schnorr-style proof-of-knowledge response scalar
	secret *bls48581.BIG // this party's polynomial constant term
	scalar *bls48581.BIG // combined secret share (own evaluation plus received fragments)
	publicKey *bls48581.ECP // reconstructed group key; holds the generator until known
	point     *bls48581.ECP // this party's public share point; generator until computed
	randomCommitmentPoint *bls48581.ECP // commitment point for the ZKPoK
	round FeldmanRound // current protocol round
	zkcommitsFromCounterparties map[int][]byte        // hash commitments to counterparties' reveals
	pointsFromCounterparties    map[int]*bls48581.ECP // verified revealed share points
}
// FeldmanReveal is the payload a party publishes in the reveal round: its
// serialized public share point, the ZKPoK commitment point, and the proof
// response scalar.
type FeldmanReveal struct {
	Point                 []byte // compressed public share point
	RandomCommitmentPoint []byte // compressed ZKPoK commitment point
	ZKPoK                 []byte // proof response scalar bytes
}
// FeldmanECP8 is the ECP8-group analogue of FeldmanECP; fields mirror that
// type exactly, with points taken in bls48581's ECP8 group.
type FeldmanECP8 struct {
	threshold int // minimum number of shares required to reconstruct
	total     int // total number of participants
	id        int // this party's 1-based participant index
	// Serialized polynomial evaluations destined for each counterparty, keyed by id.
	fragsForCounterparties map[int][]byte
	// Deserialized fragments received from counterparties, keyed by id.
	fragsFromCounterparties map[int]*bls48581.BIG
	zkpok  *bls48581.BIG // Schnorr-style proof-of-knowledge response scalar
	secret *bls48581.BIG // this party's polynomial constant term
	scalar *bls48581.BIG // combined secret share (own evaluation plus received fragments)
	publicKey *bls48581.ECP8 // reconstructed group key; holds the generator until known
	point     *bls48581.ECP8 // this party's public share point; generator until computed
	randomCommitmentPoint *bls48581.ECP8 // commitment point for the ZKPoK
	round FeldmanRound // current protocol round
	zkcommitsFromCounterparties map[int][]byte         // hash commitments to counterparties' reveals
	pointsFromCounterparties    map[int]*bls48581.ECP8 // verified revealed share points
}
// FeldmanRound tracks a party's progress through the VSS protocol rounds.
type FeldmanRound int

const (
	FELDMAN_ROUND_UNINITIALIZED = FeldmanRound(0) // no polynomial sampled yet
	FELDMAN_ROUND_INITIALIZED   = FeldmanRound(1) // polynomial sampled, fragments ready
	FELDMAN_ROUND_COMMITTED     = FeldmanRound(2) // all fragments absorbed, zk commitment produced
	FELDMAN_ROUND_REVEALED      = FeldmanRound(3) // reveal payload produced
	FELDMAN_ROUND_RECONSTRUCTED = FeldmanRound(4) // public key reconstructed and cross-checked
)
// NewFeldmanECP constructs a participant in a threshold-of-total Feldman VSS
// over the ECP group, identified by the 1-based id and holding secret as its
// polynomial constant term. The error result is always nil; it is retained
// for signature stability.
func NewFeldmanECP(threshold, total, id int, secret *bls48581.BIG) (*FeldmanECP, error) {
	f := new(FeldmanECP)
	f.threshold = threshold
	f.total = total
	f.id = id
	f.fragsForCounterparties = make(map[int][]byte)
	f.fragsFromCounterparties = make(map[int]*bls48581.BIG)
	f.secret = secret
	// The generator serves as the "unset" sentinel for both points.
	f.publicKey = bls48581.ECP_generator()
	f.point = bls48581.ECP_generator()
	f.round = FELDMAN_ROUND_UNINITIALIZED
	f.zkcommitsFromCounterparties = make(map[int][]byte)
	f.pointsFromCounterparties = make(map[int]*bls48581.ECP)
	return f, nil
}
// NewFeldmanECP8 constructs a participant in a threshold-of-total Feldman VSS
// over the ECP8 group, identified by the 1-based id and holding secret as its
// polynomial constant term. The error result is always nil; it is retained
// for signature stability.
func NewFeldmanECP8(threshold, total, id int, secret *bls48581.BIG) (*FeldmanECP8, error) {
	f := new(FeldmanECP8)
	f.threshold = threshold
	f.total = total
	f.id = id
	f.fragsForCounterparties = make(map[int][]byte)
	f.fragsFromCounterparties = make(map[int]*bls48581.BIG)
	f.secret = secret
	// The generator serves as the "unset" sentinel for both points.
	f.publicKey = bls48581.ECP8_generator()
	f.point = bls48581.ECP8_generator()
	f.round = FELDMAN_ROUND_UNINITIALIZED
	f.zkcommitsFromCounterparties = make(map[int][]byte)
	f.pointsFromCounterparties = make(map[int]*bls48581.ECP8)
	return f, nil
}
// SamplePolynomial samples a random degree-(threshold-1) polynomial with the
// party's secret as constant term, evaluates it at x = 1..total (Horner's
// method, mod the curve order), keeps the evaluation at x = id as this
// party's own scalar share, and serializes the other evaluations as
// fragments for the counterparties. Advances the round to INITIALIZED.
//
// Fix: crypto/rand.Read errors were previously ignored; a failed read would
// have silently used an all-zero (or partially filled) buffer as a secret
// polynomial coefficient. We now panic on entropy failure.
func (f *FeldmanECP) SamplePolynomial() {
	coeffs := append([]*bls48581.BIG{}, f.secret)
	for i := 0; i < f.threshold-1; i++ {
		secretBytes := make([]byte, int(bls48581.MODBYTES))
		if _, err := rand.Read(secretBytes); err != nil {
			// Entropy failure must abort: continuing would produce
			// predictable key material.
			panic(err)
		}
		secret := bls48581.FromBytes(secretBytes)
		coeffs = append(coeffs, secret)
	}

	for i := 1; i <= f.total; i++ {
		// Horner evaluation of the polynomial at x = i.
		result := coeffs[len(coeffs)-1]
		for j := len(coeffs) - 2; j >= 0; j-- {
			result = bls48581.Modadd(
				coeffs[j],
				bls48581.Modmul(
					result,
					bls48581.NewBIGint(i),
					bls48581.NewBIGints(bls48581.CURVE_Order),
				),
				bls48581.NewBIGints(bls48581.CURVE_Order),
			)
		}

		if i == f.id {
			f.scalar = result
		} else {
			fragBytes := make([]byte, int(bls48581.MODBYTES))
			result.ToBytes(fragBytes)
			f.fragsForCounterparties[i] = fragBytes
		}
	}

	f.round = FELDMAN_ROUND_INITIALIZED
}
// Scalar returns this party's secret share. It is nil before
// SamplePolynomial runs, and only final after SetPolyFragForParty has
// absorbed all counterparty fragments.
func (f *FeldmanECP) Scalar() *bls48581.BIG {
	return f.scalar
}
// GetPolyFrags returns the serialized polynomial evaluations to distribute,
// keyed by counterparty id (this party's own id is absent from the map).
func (f *FeldmanECP) GetPolyFrags() map[int][]byte {
	return f.fragsForCounterparties
}
// SetPolyFragForParty records the polynomial fragment received from party id.
// Once fragments from all total-1 counterparties have arrived it finalizes
// this party's scalar share, derives its public share point, builds a
// Schnorr-style ZKPoK over a Fiat-Shamir challenge, and returns a SHA-256
// commitment to the forthcoming reveal for broadcast. Until then it returns
// an empty slice. Advances the round to COMMITTED when the commitment is made.
//
// Fix: the crypto/rand.Read error was previously ignored; an all-zero ZKPoK
// nonce would let an observer recover the scalar share from the proof. We now
// panic on entropy failure.
func (f *FeldmanECP) SetPolyFragForParty(id int, frag []byte) []byte {
	f.fragsFromCounterparties[id] = bls48581.FromBytes(frag)

	if len(f.fragsFromCounterparties) == f.total-1 {
		// Fold every received fragment into this party's own evaluation.
		for _, v := range f.fragsFromCounterparties {
			f.scalar = bls48581.Modadd(f.scalar, v, bls48581.NewBIGints(bls48581.CURVE_Order))
		}

		f.point = f.point.Mul(f.scalar)

		randCommitmentBytes := make([]byte, int(bls48581.MODBYTES))
		if _, err := rand.Read(randCommitmentBytes); err != nil {
			// Entropy failure must abort: a predictable nonce leaks the scalar.
			panic(err)
		}
		randCommitment := bls48581.FromBytes(randCommitmentBytes)
		f.randomCommitmentPoint = f.publicKey.Mul(randCommitment)

		// Fiat-Shamir challenge over the compressed point encodings.
		publicPointBytes := make([]byte, bls48581.MODBYTES+1)
		randCommitmentPointBytes := make([]byte, bls48581.MODBYTES+1)
		f.point.ToBytes(publicPointBytes, true)
		f.randomCommitmentPoint.ToBytes(randCommitmentPointBytes, true)

		challenge := sha256.Sum256(append(append([]byte{}, publicPointBytes...), randCommitmentPointBytes...))
		challengeBig := bls48581.FromBytes(challenge[:])
		challengeBig.Mod(bls48581.NewBIGints(bls48581.CURVE_Order))

		// zkpok = scalar*challenge + nonce (mod curve order).
		f.zkpok = bls48581.Modadd(
			bls48581.Modmul(
				f.scalar,
				challengeBig,
				bls48581.NewBIGints(bls48581.CURVE_Order),
			),
			randCommitment,
			bls48581.NewBIGints(bls48581.CURVE_Order),
		)

		zkpokBytes := make([]byte, int(bls48581.MODBYTES))
		f.zkpok.ToBytes(zkpokBytes)

		// Hash commitment to (commitment point, proof) — opened in the reveal round.
		zkcommit := sha256.Sum256(append(append([]byte{}, randCommitmentPointBytes...), zkpokBytes...))
		f.round = FELDMAN_ROUND_COMMITTED
		return zkcommit[:]
	}

	return []byte{}
}
// ReceiveCommitments stores the ZKPoK hash commitment from party id. Once
// commitments from all total-1 counterparties have arrived it advances to the
// REVEALED round and returns this party's own reveal payload; until then it
// returns nil.
func (f *FeldmanECP) ReceiveCommitments(id int, zkcommit []byte) *FeldmanReveal {
	f.zkcommitsFromCounterparties[id] = zkcommit

	if len(f.zkcommitsFromCounterparties) == f.total-1 {
		// Compressed encodings of the share point and the ZKPoK commitment point.
		publicPointBytes := make([]byte, bls48581.MODBYTES+1)
		randCommitmentPointBytes := make([]byte, bls48581.MODBYTES+1)
		f.point.ToBytes(publicPointBytes, true)
		f.randomCommitmentPoint.ToBytes(randCommitmentPointBytes, true)
		f.round = FELDMAN_ROUND_REVEALED

		zkpokBytes := make([]byte, int(bls48581.MODBYTES))
		f.zkpok.ToBytes(zkpokBytes)

		return &FeldmanReveal{
			Point:                 publicPointBytes,
			RandomCommitmentPoint: randCommitmentPointBytes,
			ZKPoK:                 zkpokBytes,
		}
	}

	return nil
}
// Recombine validates the reveal from party id — checking the Schnorr-style
// ZKPoK against the Fiat-Shamir challenge and the previously received hash
// commitment — and stores the revealed share point. Once all counterparties
// have revealed, it reconstructs the group public key by Lagrange
// interpolation in the exponent over each contiguous window of threshold+1
// shares and verifies every window yields the same key. Failures are printed
// and abort the call without changing the round.
func (f *FeldmanECP) Recombine(id int, reveal *FeldmanReveal) {
	counterpartyPoint := bls48581.ECP_fromBytes(reveal.Point)
	// The generator doubles as the "unset" sentinel in this type, so a
	// revealed point equal to it is rejected outright.
	if counterpartyPoint.Equals(bls48581.ECP_generator()) {
		fmt.Printf("invalid point from %d", id)
		return
	}

	counterpartyRandomCommitmentPoint := bls48581.ECP_fromBytes(reveal.RandomCommitmentPoint)
	if counterpartyRandomCommitmentPoint.Equals(bls48581.ECP_generator()) {
		fmt.Printf("invalid commitment point from %d", id)
		return
	}

	counterpartyZKPoK := bls48581.FromBytes(reveal.ZKPoK)
	counterpartyZKCommit := f.zkcommitsFromCounterparties[id]

	// Recompute the Fiat-Shamir challenge from the revealed encodings.
	challenge := sha256.Sum256(append(append([]byte{}, reveal.Point...), reveal.RandomCommitmentPoint...))
	challengeBig := bls48581.FromBytes(challenge[:])
	challengeBig.Mod(bls48581.NewBIGints(bls48581.CURVE_Order))

	// Schnorr check: base^zkpok == R + P*challenge. At this stage publicKey
	// still holds the generator set by the constructor.
	proof := f.publicKey.Mul(counterpartyZKPoK)
	counterpartyRandomCommitmentPoint.Add(counterpartyPoint.Mul(challengeBig))
	if !proof.Equals(counterpartyRandomCommitmentPoint) {
		fmt.Printf("invalid proof from %d", id)
		return
	}

	// Confirm the reveal matches the hash commitment received earlier.
	verifier := sha256.Sum256(append(append([]byte{}, reveal.RandomCommitmentPoint...), reveal.ZKPoK...))
	if !bytes.Equal(counterpartyZKCommit, verifier[:]) {
		fmt.Printf("%d changed zkpok after commit", id)
		return
	}

	f.pointsFromCounterparties[id] = counterpartyPoint

	if len(f.pointsFromCounterparties) == f.total-1 {
		f.pointsFromCounterparties[f.id] = f.point

		// Interpolate over each contiguous window [i, threshold+i] of shares
		// and require all windows to agree on the same key.
		for i := 1; i <= f.total-f.threshold; i++ {
			reconstructedSum := bls48581.ECP_generator()

			for j := i; j <= f.threshold+i; j++ {
				// Lagrange coefficient for share j over this window:
				// prod(k) / prod(k - j), computed mod the curve order.
				coefficientNumerator := bls48581.NewBIGint(1)
				coefficientDenominator := bls48581.NewBIGint(1)

				for k := i; k <= f.threshold+i; k++ {
					if j != k {
						k := bls48581.NewBIGint(k)
						coefficientNumerator = bls48581.Modmul(
							coefficientNumerator,
							k,
							bls48581.NewBIGints(bls48581.CURVE_Order),
						)
						// kj = k - j mod order, via k + (order - j).
						kj := bls48581.Modadd(
							k,
							bls48581.NewBIGints(bls48581.CURVE_Order).Minus(bls48581.NewBIGint(j)),
							bls48581.NewBIGints(bls48581.CURVE_Order),
						)
						coefficientDenominator = bls48581.Modmul(
							coefficientDenominator,
							kj,
							bls48581.NewBIGints(bls48581.CURVE_Order),
						)
					}
				}

				coefficientDenominator.Invmodp(bls48581.NewBIGints(bls48581.CURVE_Order))
				reconstructedFragment := f.pointsFromCounterparties[j].Mul(
					bls48581.Modmul(
						coefficientNumerator,
						coefficientDenominator,
						bls48581.NewBIGints(bls48581.CURVE_Order),
					),
				)

				// Generator acts as the "empty sum" sentinel for the accumulator.
				if reconstructedSum.Equals(bls48581.ECP_generator()) {
					reconstructedSum = reconstructedFragment
				} else {
					reconstructedSum.Add(reconstructedFragment)
				}
			}

			if f.publicKey.Equals(bls48581.ECP_generator()) {
				f.publicKey = reconstructedSum
			} else if !f.publicKey.Equals(reconstructedSum) {
				fmt.Println("key mismatch")
				fmt.Println(f.publicKey.ToString())
				fmt.Println(reconstructedSum.ToString())
				return
			}
		}

		f.round = FELDMAN_ROUND_RECONSTRUCTED
	}
}
// PublicKey returns a defensive copy of the reconstructed group public key.
func (f *FeldmanECP) PublicKey() *bls48581.ECP {
	key := bls48581.NewECP()
	key.Copy(f.publicKey)
	return key
}
// PublicKeyBytes returns the compressed serialization of the group public key.
func (f *FeldmanECP) PublicKeyBytes() []byte {
	publicKeyBytes := make([]byte, bls48581.MODBYTES+1)
	f.publicKey.ToBytes(publicKeyBytes, true)
	return publicKeyBytes
}
// SamplePolynomial samples a random degree-(threshold-1) polynomial with the
// party's secret as constant term, evaluates it at x = 1..total (Horner's
// method, mod the curve order), keeps the evaluation at x = id as this
// party's own scalar share, and serializes the other evaluations as
// fragments for the counterparties. Advances the round to INITIALIZED.
//
// Fix: crypto/rand.Read errors were previously ignored; a failed read would
// have silently used an all-zero (or partially filled) buffer as a secret
// polynomial coefficient. We now panic on entropy failure.
func (f *FeldmanECP8) SamplePolynomial() {
	coeffs := append([]*bls48581.BIG{}, f.secret)
	for i := 0; i < f.threshold-1; i++ {
		secretBytes := make([]byte, int(bls48581.MODBYTES))
		if _, err := rand.Read(secretBytes); err != nil {
			// Entropy failure must abort: continuing would produce
			// predictable key material.
			panic(err)
		}
		secret := bls48581.FromBytes(secretBytes)
		coeffs = append(coeffs, secret)
	}

	for i := 1; i <= f.total; i++ {
		// Horner evaluation of the polynomial at x = i.
		result := coeffs[len(coeffs)-1]
		for j := len(coeffs) - 2; j >= 0; j-- {
			result = bls48581.Modadd(
				coeffs[j],
				bls48581.Modmul(
					result,
					bls48581.NewBIGint(i),
					bls48581.NewBIGints(bls48581.CURVE_Order),
				),
				bls48581.NewBIGints(bls48581.CURVE_Order),
			)
		}

		if i == f.id {
			f.scalar = result
		} else {
			fragBytes := make([]byte, int(bls48581.MODBYTES))
			result.ToBytes(fragBytes)
			f.fragsForCounterparties[i] = fragBytes
		}
	}

	f.round = FELDMAN_ROUND_INITIALIZED
}
// GetPolyFrags returns the serialized polynomial evaluations to distribute,
// keyed by counterparty id (this party's own id is absent from the map).
func (f *FeldmanECP8) GetPolyFrags() map[int][]byte {
	return f.fragsForCounterparties
}
// SetPolyFragForParty records the polynomial fragment received from party id.
// Once fragments from all total-1 counterparties have arrived it finalizes
// this party's scalar share, derives its public share point, builds a
// Schnorr-style ZKPoK over a Fiat-Shamir challenge, and returns a SHA-256
// commitment to the forthcoming reveal for broadcast. Until then it returns
// an empty slice. Advances the round to COMMITTED when the commitment is made.
//
// Fix: the crypto/rand.Read error was previously ignored; an all-zero ZKPoK
// nonce would let an observer recover the scalar share from the proof. We now
// panic on entropy failure.
func (f *FeldmanECP8) SetPolyFragForParty(id int, frag []byte) []byte {
	f.fragsFromCounterparties[id] = bls48581.FromBytes(frag)

	if len(f.fragsFromCounterparties) == f.total-1 {
		// Fold every received fragment into this party's own evaluation.
		for _, v := range f.fragsFromCounterparties {
			f.scalar = bls48581.Modadd(f.scalar, v, bls48581.NewBIGints(bls48581.CURVE_Order))
		}

		f.point = f.point.Mul(f.scalar)

		randCommitmentBytes := make([]byte, int(bls48581.MODBYTES))
		if _, err := rand.Read(randCommitmentBytes); err != nil {
			// Entropy failure must abort: a predictable nonce leaks the scalar.
			panic(err)
		}
		randCommitment := bls48581.FromBytes(randCommitmentBytes)
		f.randomCommitmentPoint = f.publicKey.Mul(randCommitment)

		// Fiat-Shamir challenge over the compressed point encodings
		// (ECP8 compressed points are MODBYTES*8+1 bytes).
		publicPointBytes := make([]byte, bls48581.MODBYTES*8+1)
		randCommitmentPointBytes := make([]byte, bls48581.MODBYTES*8+1)
		f.point.ToBytes(publicPointBytes, true)
		f.randomCommitmentPoint.ToBytes(randCommitmentPointBytes, true)

		challenge := sha256.Sum256(append(append([]byte{}, publicPointBytes...), randCommitmentPointBytes...))
		challengeBig := bls48581.FromBytes(challenge[:])
		challengeBig.Mod(bls48581.NewBIGints(bls48581.CURVE_Order))

		// zkpok = scalar*challenge + nonce (mod curve order).
		f.zkpok = bls48581.Modadd(
			bls48581.Modmul(
				f.scalar,
				challengeBig,
				bls48581.NewBIGints(bls48581.CURVE_Order),
			),
			randCommitment,
			bls48581.NewBIGints(bls48581.CURVE_Order),
		)

		zkpokBytes := make([]byte, int(bls48581.MODBYTES))
		f.zkpok.ToBytes(zkpokBytes)

		// Hash commitment to (commitment point, proof) — opened in the reveal round.
		zkcommit := sha256.Sum256(append(append([]byte{}, randCommitmentPointBytes...), zkpokBytes...))
		f.round = FELDMAN_ROUND_COMMITTED
		return zkcommit[:]
	}

	return []byte{}
}
// Scalar returns this party's secret share. It is nil before
// SamplePolynomial runs, and only final after SetPolyFragForParty has
// absorbed all counterparty fragments.
func (f *FeldmanECP8) Scalar() *bls48581.BIG {
	return f.scalar
}
// ReceiveCommitments stores the ZKPoK hash commitment from party id. Once
// commitments from all total-1 counterparties have arrived it advances to the
// REVEALED round and returns this party's own reveal payload; until then it
// returns nil.
func (f *FeldmanECP8) ReceiveCommitments(id int, zkcommit []byte) *FeldmanReveal {
	f.zkcommitsFromCounterparties[id] = zkcommit

	if len(f.zkcommitsFromCounterparties) == f.total-1 {
		// Compressed ECP8 encodings are MODBYTES*8+1 bytes.
		publicPointBytes := make([]byte, bls48581.MODBYTES*8+1)
		randCommitmentPointBytes := make([]byte, bls48581.MODBYTES*8+1)
		f.point.ToBytes(publicPointBytes, true)
		f.randomCommitmentPoint.ToBytes(randCommitmentPointBytes, true)
		f.round = FELDMAN_ROUND_REVEALED

		zkpokBytes := make([]byte, int(bls48581.MODBYTES))
		f.zkpok.ToBytes(zkpokBytes)

		return &FeldmanReveal{
			Point:                 publicPointBytes,
			RandomCommitmentPoint: randCommitmentPointBytes,
			ZKPoK:                 zkpokBytes,
		}
	}

	return nil
}
// Recombine validates the reveal from party id — checking the Schnorr-style
// ZKPoK against the Fiat-Shamir challenge and the previously received hash
// commitment — and stores the revealed share point. Once all counterparties
// have revealed, it reconstructs the group public key by Lagrange
// interpolation in the exponent over each contiguous window of threshold+1
// shares and verifies every window yields the same key. Failures are printed
// and abort the call without changing the round.
func (f *FeldmanECP8) Recombine(id int, reveal *FeldmanReveal) {
	counterpartyPoint := bls48581.ECP8_fromBytes(reveal.Point)
	// The generator doubles as the "unset" sentinel in this type, so a
	// revealed point equal to it is rejected outright.
	if counterpartyPoint.Equals(bls48581.ECP8_generator()) {
		fmt.Printf("invalid point from %d", id)
		return
	}

	counterpartyRandomCommitmentPoint := bls48581.ECP8_fromBytes(reveal.RandomCommitmentPoint)
	if counterpartyRandomCommitmentPoint.Equals(bls48581.ECP8_generator()) {
		fmt.Printf("invalid commitment point from %d", id)
		return
	}

	counterpartyZKPoK := bls48581.FromBytes(reveal.ZKPoK)
	counterpartyZKCommit := f.zkcommitsFromCounterparties[id]

	// Recompute the Fiat-Shamir challenge from the revealed encodings.
	challenge := sha256.Sum256(append(append([]byte{}, reveal.Point...), reveal.RandomCommitmentPoint...))
	challengeBig := bls48581.FromBytes(challenge[:])
	challengeBig.Mod(bls48581.NewBIGints(bls48581.CURVE_Order))

	// Schnorr check: base^zkpok == R + P*challenge. At this stage publicKey
	// still holds the generator set by the constructor.
	proof := f.publicKey.Mul(counterpartyZKPoK)
	counterpartyRandomCommitmentPoint.Add(counterpartyPoint.Mul(challengeBig))
	if !proof.Equals(counterpartyRandomCommitmentPoint) {
		fmt.Printf("invalid proof from %d", id)
		return
	}

	// Confirm the reveal matches the hash commitment received earlier.
	verifier := sha256.Sum256(append(append([]byte{}, reveal.RandomCommitmentPoint...), reveal.ZKPoK...))
	if !bytes.Equal(counterpartyZKCommit, verifier[:]) {
		fmt.Printf("%d changed zkpok after commit", id)
		return
	}

	f.pointsFromCounterparties[id] = counterpartyPoint

	if len(f.pointsFromCounterparties) == f.total-1 {
		f.pointsFromCounterparties[f.id] = f.point

		// Interpolate over each contiguous window [i, threshold+i] of shares
		// and require all windows to agree on the same key.
		for i := 1; i <= f.total-f.threshold; i++ {
			reconstructedSum := bls48581.ECP8_generator()

			for j := i; j <= f.threshold+i; j++ {
				// Lagrange coefficient for share j over this window:
				// prod(k) / prod(k - j), computed mod the curve order.
				coefficientNumerator := bls48581.NewBIGint(1)
				coefficientDenominator := bls48581.NewBIGint(1)

				for k := i; k <= f.threshold+i; k++ {
					if j != k {
						k := bls48581.NewBIGint(k)
						coefficientNumerator = bls48581.Modmul(
							coefficientNumerator,
							k,
							bls48581.NewBIGints(bls48581.CURVE_Order),
						)
						// kj = k - j mod order, via k + (order - j).
						kj := bls48581.Modadd(
							k,
							bls48581.NewBIGints(bls48581.CURVE_Order).Minus(bls48581.NewBIGint(j)),
							bls48581.NewBIGints(bls48581.CURVE_Order),
						)
						coefficientDenominator = bls48581.Modmul(
							coefficientDenominator,
							kj,
							bls48581.NewBIGints(bls48581.CURVE_Order),
						)
					}
				}

				coefficientDenominator.Invmodp(bls48581.NewBIGints(bls48581.CURVE_Order))
				reconstructedFragment := f.pointsFromCounterparties[j].Mul(
					bls48581.Modmul(
						coefficientNumerator,
						coefficientDenominator,
						bls48581.NewBIGints(bls48581.CURVE_Order),
					),
				)

				// Generator acts as the "empty sum" sentinel for the accumulator.
				if reconstructedSum.Equals(bls48581.ECP8_generator()) {
					reconstructedSum = reconstructedFragment
				} else {
					reconstructedSum.Add(reconstructedFragment)
				}
			}

			if f.publicKey.Equals(bls48581.ECP8_generator()) {
				f.publicKey = reconstructedSum
			} else if !f.publicKey.Equals(reconstructedSum) {
				fmt.Println("key mismatch")
				fmt.Println(f.publicKey.ToString())
				fmt.Println(reconstructedSum.ToString())
				return
			}
		}

		f.round = FELDMAN_ROUND_RECONSTRUCTED
	}
}
// PublicKey returns a defensive copy of the reconstructed group public key.
func (f *FeldmanECP8) PublicKey() *bls48581.ECP8 {
	key := bls48581.NewECP8()
	key.Copy(f.publicKey)
	return key
}
// PublicKeyBytes returns the compressed serialization of the group public key
// (MODBYTES*8+1 bytes for ECP8).
func (f *FeldmanECP8) PublicKeyBytes() []byte {
	publicKeyBytes := make([]byte, bls48581.MODBYTES*8+1)
	f.publicKey.ToBytes(publicKeyBytes, true)
	return publicKeyBytes
}

11
go.mod
View File

@ -1,10 +1,3 @@
module source.quilibrium.com/quilibrium/ceremonyclient
module source.quilibrium.com/quilibrium/monorepo
go 1.18
require github.com/cloudflare/circl v1.3.2
require (
golang.org/x/sync v0.1.0 // indirect
golang.org/x/sys v0.6.0 // indirect
)
go 1.18

6
go.sum
View File

@ -1,6 +0,0 @@
github.com/cloudflare/circl v1.3.2 h1:VWp8dY3yH69fdM7lM6A1+NhhVoDu9vqK0jOgmkQHFWk=
github.com/cloudflare/circl v1.3.2/go.mod h1:+CauBF6R70Jqcyl8N2hC8pAXYbWkGIezuSbuGLtRhnw=
golang.org/x/sync v0.1.0 h1:wsuoTGHzEhffawBOhz5CYhcrV4IdKZbEyZjBMuTp12o=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=

110
main.go
View File

@ -1,110 +0,0 @@
package main
import (
"fmt"
"os"
"time"
)
// HOST is the base URL of the ceremony sequencer service.
var HOST string = "https://ceremony.quilibrium.com:8443/"
// main prints the banner and version, then dispatches on the CLI arguments.
func main() {
	PrintLogo()
	PrintVersion()
	ParseArgs()
}
// ParseArgs dispatches on the optional first CLI argument. The
// "verify-transcript" and "check-voucher" subcommands run and exit; any
// other invocation joins the ceremony lobby, bootstraps, prints the
// resulting public keys, and contributes.
//
// Fix: "check-voucher" previously indexed os.Args[2] without a length check,
// panicking with index-out-of-range when the filename was omitted.
func ParseArgs() {
	if len(os.Args) > 1 {
		switch os.Args[1] {
		case "verify-transcript":
			VerifyState()
			os.Exit(0)
		case "check-voucher":
			if len(os.Args) < 3 {
				fmt.Println("Usage: check-voucher <voucher_filename>")
				os.Exit(1)
			}
			CheckVoucherInclusion(os.Args[2])
			os.Exit(0)
		}
	}

	WaitForSequencerToBeReady()
	JoinLobby()
	Bootstrap()

	fmt.Println("New PoT Pubkey: ")
	fmt.Println(bcj.PotPubKey)
	fmt.Println()
	fmt.Println("Voucher Pubkey: ")
	fmt.Println(bcj.VoucherPubKey)

	ContributeAndGetVoucher()
}
// WaitForSequencerToBeReady polls the sequencer every five seconds, drawing a
// braille spinner and an attempt counter in place, until GetSequencerState
// reports SEQUENCER_ACCEPTING.
func WaitForSequencerToBeReady() {
	spinnerChars := []string{"⣾", "⣽", "⣻", "⢿", "⡿", "⣟", "⣯", "⣷"}
	spinnerIndex := 0
	attempts := 0
	// ANSI escape: cursor up one line, then erase it, so the status redraws in place.
	removeLine := "\u001B[A\u001B[2K"
	state := GetSequencerState()
	for state != SEQUENCER_ACCEPTING {
		message := "Sequencer currently not accepting new contributions, waiting..."
		status := fmt.Sprintf("[Attempt %d - Last Checked: %s]", attempts, time.Now().String())
		fmt.Printf("\r%s", removeLine)
		fmt.Printf("%s\n", message+spinnerChars[spinnerIndex])
		fmt.Printf(" |- %s", status)
		spinnerIndex = (spinnerIndex + 1) % len(spinnerChars)
		attempts += 1
		time.Sleep(5 * time.Second)
		state = GetSequencerState()
	}
	fmt.Println()
	fmt.Println("Sequencer is ready for contributions!")
}
// PrintLogo writes the Quilibrium ASCII-art logo to stdout. The art lines are
// whitespace-sensitive and must not be reformatted.
func PrintLogo() {
	fmt.Println()
	fmt.Println(" %#########")
	fmt.Println(" #############################")
	fmt.Println(" ########################################&")
	fmt.Println(" ###############################################")
	fmt.Println(" &#####################% %######################")
	fmt.Println(" ################# #################")
	fmt.Println(" ############### ###############")
	fmt.Println(" ############# ##############")
	fmt.Println(" ############# ############&")
	fmt.Println(" ############ ############")
	fmt.Println(" ########### ########## &###########")
	fmt.Println(" ########### ############## ###########")
	fmt.Println(" ########### ############## ##########&")
	fmt.Println(" ########## ############## ##########")
	fmt.Println("%########## ########## ##########")
	fmt.Println("##########& ##########")
	fmt.Println("########## &#########")
	fmt.Println("##########& ####### ####### ##########")
	fmt.Println(" ########## &######################### ##########")
	fmt.Println(" ########## ##############% ############## &##########")
	fmt.Println(" %########## &############## ############### ##########")
	fmt.Println(" ########### ############### ##############% ###########")
	fmt.Println(" ###########& ########## ############### ########")
	fmt.Println(" ############ ##### ##############% ####")
	fmt.Println(" ############ ###############")
	fmt.Println(" ############## ##############%")
	fmt.Println(" ############### ###############")
	fmt.Println(" #################& ##############%")
	fmt.Println(" #########################&&&############# ###############")
	fmt.Println(" ########################################% ############")
	fmt.Println(" ####################################### ########")
	fmt.Println(" ############################# ##")
}
// PrintVersion writes the client name and version banner to stdout.
func PrintVersion() {
	fmt.Println(" ")
	fmt.Println(" Quilibrium Ceremony Client - CLI - v1.0.2")
	fmt.Println()
	fmt.Println()
}

19
nekryptology/.gitignore vendored Normal file
View File

@ -0,0 +1,19 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/
.idea/
.DS_Store
vendor/

586
nekryptology/.golangci.yml Normal file
View File

@ -0,0 +1,586 @@
run:
tests: true
timeout: 5m
build-tags:
- integration
skip-dirs:
- gen/
output:
format: colored-line-number
sort-results: true
# https://golangci-lint.run/usage/linters
linters:
disable-all: true
fast: false
enable:
# Simple linter to check that your code does not contain non-ASCII identifiers
# - asciicheck
# Finds unused code
- deadcode
# Checks assignments with too many blank identifiers (e.g. x, , , _, := f())
- dogsled
# Tool for code clone detection
# TODO: Enable this later
# - dupl
# check for two durations multiplied together
- durationcheck
# Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases
- errcheck
# errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.
- errorlint
# check exhaustiveness of enum switch statements
- exhaustive
# checks for pointers to enclosing loop variables
- exportloopref
# # Forbids identifiers
# - forbidigo
# # finds forced type assertions
# - forcetypeassert
# Gci control golang package import order and make it always deterministic.
- gci
# # Checks that no init functions are present in Go code
# - gochecknoinits
# # Finds repeated strings that could be replaced by a constant
# - goconst
# # Provides many diagnostics that check for bugs, performance and style issues. Extensible without recompilation through dynamic rules. Dynamic rules are written declaratively with AST patterns, filters, report message and optional suggestion.
# - gocritic
# # Check if comments end in a period
# - godot
# # Golang linter to check the errors handling expressions
# - goerr113
# # Gofumpt checks whether code was gofumpt-ed.
# - gofumpt
# # An analyzer to detect magic numbers.
# - gomnd
# # Checks that printf-like functions are named with f at the end
# - goprintffuncname
# # Inspects source code for security problems
# - gosec
# # Linter for Go source code that specializes in simplifying a code
# - gosimple
# # Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string
# - govet
# # Checks that your code uses short syntax for if-statements whenever possible
# - ifshort
# # Enforces consistent import aliases
# - importas
# # Detects when assignments to existing variables are not used
# - ineffassign
# # Finds slice declarations with non-zero initial length
# - makezero
# # Finds commonly misspelled English words in comments
# - misspell
# # Finds naked returns in functions greater than a specified function length
# - nakedret
# # Reports deeply nested if statements
# - nestif
# # Finds the code that returns nil even if it checks that the error is not nil.
# - nilerr
# # Reports ill-formed or insufficient nolint directives
# - nolintlint
# # Finds slice declarations that could potentially be preallocated
# - prealloc
# # find code that shadows one of Go's predeclared identifiers
# - predeclared
# # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint.
# - revive
# # Staticcheck is a go vet on steroids, applying a ton of static analysis checks
# - staticcheck
# # Finds unused struct fields
# - structcheck
# # linter that makes you use a separate _test package
# - testpackage
# # thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers
# - thelper
# # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes
# - tparallel
# # Like the front-end of a Go compiler, parses and type-checks Go code
# - typecheck
# # Remove unnecessary type conversions
# - unconvert
# # Reports unused function parameters
# - unparam
# # Checks Go code for unused constants, variables, functions and types
# - unused
# # Finds unused global variables and constants
# - varcheck
# # wastedassign finds wasted assignment statements.
# - wastedassign
# # Tool for detection of leading and trailing whitespace
# - whitespace
# # Checks that errors returned from external packages are wrapped
# - wrapcheck
linters-settings:
errcheck:
# # report about not checking of errors in type assertions: `a := b.(MyStruct)`;
# # default is false: such cases aren't reported by default.
check-type-assertions: true
# # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`;
# # default is false: such cases aren't reported by default.
check-blank: false
# goconst:
# # minimal length of string constant, 3 by default
# min-len: 3
# # minimal occurrences count to trigger, 3 by default
# min-occurrences: 3
# gosimple:
# go: "1.17"
# checks: ["all"]
# predeclared:
# # include method names and field names (i.e., qualified names) in checks
# q: true
# staticcheck:
# go: "1.17"
# # https://staticcheck.io/docs/options#checks
# checks: ["all"]
# unused:
# go: "1.17"
# testpackage:
# # regexp pattern to skip files
# # We're skipping securechannel tests - and keep them as internal tests
# # because other wise we'd have to export a lot of unnecessary things out of
# # the package.
# skip-regexp: (groupchannel|peerchannel)_test\.go
# misspell:
# # Correct spellings using locale preferences for US or UK.
# # Default is to use a neutral variety of English.
# # Setting locale to US will correct the British spelling of 'colour' to 'color'.
# locale: US
# ignore-words:
# - cancelled
# godot:
# # comments to be checked: `declarations`, `toplevel`, or `all`
# scope: toplevel
# # list of regexps for excluding particular comment lines from check
# exclude:
# - '^ [=-_]+$' # contain line comments eg '// ============'
# - '^ [A-Za-z-_]+$' # contains only a single word eg '// Cosigners'
# - '^ [A-Za-z-_]+ [A-Za-z-_]+$' # contains only two words eg '// Server Sage'
# - '[TODO|FIXME]:' # contains TODO or FIXME keywords
# - '[:]+' # contain a column eg. comments like '// Usage: ...' or comments containing an emoji
# # don't enforce comments starting with capital letters because the first word might be
# # referring to an argument within the function or alike
# capital: false
# whitespace:
# multi-if: true # Enforces newlines (or comments) after every multi-line if statement
# multi-func: false # Enforces newlines (or comments) after every multi-line function signature
# prealloc:
# # Report preallocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. true by default
# simple: true
# range-loops: true # Report preallocation suggestions on range loops, true by default
# for-loops: true # Report preallocation suggestions on for loops, false by default
errorlint:
# Check whether fmt.Errorf uses the %w verb for formatting errors. keep in mind that any errors wrapped by fmt.Errorf implicitly become part of the API as according to Hyrum's Law.
errorf: true
# Check for plain type assertions and type switches
asserts: true
# Check for plain error comparisons
comparison: true
exhaustive:
# indicates that switch statements are to be considered exhaustive if a
# 'default' case is present, even if all enum members aren't listed in the
# switch
default-signifies-exhaustive: false
gci:
# put imports beginning with prefix after 3rd-party packages;
# only support one prefix
# if not set, use goimports.local-prefixes
local-prefixes: github.com
# gofumpt:
# # Choose whether or not to use the extra rules that are disabled
# # by default
# # list of rules: https://github.com/mvdan/gofumpt
# extra-rules: true
# forbidigo:
# # Forbid the following identifiers (identifiers are written using regexp):
# forbid:
# - 'fmt\.Print.*'
# - 'pubkey'
# - 'privkey'
# # Exclude godoc examples from forbidigo checks. Default is true.
# exclude_godoc_examples: true
dogsled:
# checks assignments with too many blank identifiers; default is 2
max-blank-identifiers: 3
# gomnd: # Detection methods of magic numbers
# # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description.
# checks: argument,case,condition,operation,return,assign
# ignored-numbers:
# - '2' # We often use 2 in the context of two-party stuff (e.g. DKLs18)
# - '10' # Parsing integers in base 10 is common
# - '64' # Parsing integers into 64 bit integers
# dupl:
# # tokens count to trigger issue, 150 by default
# threshold: 150
# revive:
# rules:
# # Disallows the usage of basic types in context.WithValue.
# - name: context-keys-type
# # Conventions around the naming of time variables.
# - name: time-naming
# # whitelist & blacklist of initialisms
# - name: var-naming
# arguments:
# # whitelist - we do not enforce consistent capitalization in naming of the following initialisms
# - ["ID"]
# # Blacklist - opposite of whitelist
# - ["KMS", "RSA", "HSM", "ECDH", "ECIES", "JSON", "URL", "PublicKey", "PrivateKey", "UUID", "HKDF", "TLS", "SQS", "SNS", "S2S"]
# # Reduces redundancies around variable declaration.
# - name: var-declaration
# # Warns when a public return is from unexported type.
# - name: unexported-return
# # Disallows blank imports
# - name: blank-imports
# # context.Context should be the first argument of a function.
# - name: context-as-argument
# # Forbids . imports.
# - name: dot-imports
# # The error return parameter should be last.
# - name: error-return
# # Conventions around error strings.
# - name: error-strings
# # Naming of error variables.
# - name: error-naming
# # Naming and commenting conventions on exported symbols.
# - name: exported
# # Redundant if when returning an error.
# - name: if-return
# # Use i++ and i-- instead of i += 1 and i -= 1.
# - name: increment-decrement
# # Package commenting conventions.
# - name: package-comments
# # Prevents redundant variables when iterating over a collection.
# - name: range
# # Conventions around the naming of receivers.
# - name: receiver-naming
# # Sets restriction for maximum Cyclomatic complexity.
# # TODO: enable this once all cyclomatic complexity todos are resolved
# # - name: cyclomatic
# # arguments:
# # - 13
# # Warns on empty code blocks
# - name: empty-block
# # Prevents redundant else statements (extends indent-error-flow)
# - name: superfluous-else
# # Warns on methods with names that differ only by capitalization
# - name: confusing-naming
# # Warns on getters that do not yield any result
# - name: get-return
# # Warns on assignments to function parameters
# - name: modifies-parameter
# # Suggests to name potentially confusing function results
# - name: confusing-results
# # Suggests removing or simplifying unnecessary statements
# - name: unnecessary-stmt
# # Checks common struct tags like json,xml,yaml
# - name: struct-tag
# # Warns on assignments to value-passed method receivers
# - name: modifies-value-receiver
# # Warns on constant logical expressions
# - name: constant-logical-expr
# # Suggests removing Boolean literals from logic expressions
# - name: bool-literal-in-expr
# # Warns on redefinitions of builtin identifiers
# - name: redefines-builtin-id
# # Warns if range value is used in a closure dispatched as goroutine
# - name: range-val-in-closure
# # Warns if address of range value is used dangerously
# - name: range-val-address
# # Warns on functions taking sync.WaitGroup as a by-value parameter
# - name: waitgroup-by-value
# # Warns on explicit call to the garbage collector
# - name: call-to-gc
# # Looks for packages that are imported two or more times
# - name: duplicated-imports
# # Spots identifiers that shadow an import
# - name: import-shadowing
# # Warns on bare returns
# - name: bare-return
# # Suggests to rename or remove unused method receivers
# - name: unused-receiver
# # Warns on suspicious casts from int to string
# - name: string-of-int
# # Spots if-then-else statements that can be refactored to simplify code reading
# - name: early-return
# # Warns on function calls that will lead to (direct) infinite recursion
# - name: unconditional-recursion
# # Spots if-then-else statements with identical then and else branches
# - name: identical-branches
# # Warns on some defer gotchas: https://blog.learngoprogramming.com/5-gotchas-of-defer-in-go-golang-part-iii-36a1ab3d6ef1
# - name: defer
# arguments:
# # call-chain: even if deferring call-chains of the form foo()() is valid, it does not helps code understanding (only the last call is deferred)
# # loop: deferring inside loops can be misleading (deferred functions are not executed at the end of the loop iteration but of the current function) and it could lead to exhausting the execution stack
# # method-call: deferring a call to a method can lead to subtle bugs if the method does not have a pointer receiver
# # recover: calling recover outside a deferred function has no effect
# # return: returning values form a deferred function has no effect
# - ["call-chain", "method-call", "return"]
# # Warns on wrongly named un-exported symbols
# - name: unexported-naming
# govet:
# # report about shadowed variables
# check-shadowing: true
# disable-all: true
# enable: # run `go tool vet help` to see all analyzers
# # report mismatches between assembly files and Go declarations
# - asmdecl
# # check for common mistakes using the sync/atomic package
# - atomic
# # check for common mistakes involving boolean operators
# - bools
# # check that +build tags are well-formed and correctly located
# - buildtag
# # detect some violations of the cgo pointer passing rules
# - cgocall
# # check for unkeyed composite literals
# - composites
# # check for locks erroneously passed by value
# - copylocks
# # report passing non-pointer or non-error values to errors.As
# - errorsas
# # report assembly that clobbers the frame pointer before saving it
# - framepointer
# # detect impossible interface-to-interface type assertions
# - ifaceassert
# # check references to loop variables from within nested functions
# - loopclosure
# # check cancel func returned by context.WithCancel is called
# - lostcancel
# # check for useless comparisons between functions and nil
# - nilfunc
# # check consistency of Printf format strings and arguments
# - printf
# # check for shifts that equal or exceed the width of the integer
# - shift
# # check signature of methods of well-known interfaces
# - stdmethods
# # check for string(int) conversions
# - stringintconv
# # check that struct field tags conform to reflect.StructTag.Get
# - structtag
# # report calls to (*testing.T).Fatal from goroutines started by a test.
# - testinggoroutine
# # check for common mistaken usages of tests and examples
# - tests
# # report passing non-pointer or non-interface values to unmarshal
# - unmarshal
# # check for unreachable code
# - unreachable
# # check for invalid conversions of uintptr to unsafe.Pointer
# - unsafeptr
# # check for unused results of calls to some functions
# - unusedresult
# gocritic:
# # Which checks should be enabled; can't be combined with 'disabled-checks';
# # See https://go-critic.github.io/overview#checks-overview
# # To check which checks are enabled run `GL_DEBUG=gocritic golangci-lint run`
# # By default the list of stable checks is used.
# enabled-checks:
# # Detects suspicious mutex lock/unlock operations
# - badLock
# # Detects suspicious regexp patterns
# - badRegexp
# # Detects suspicious duplicated arguments
# - dupArg
# # Detects duplicated branch bodies inside conditional statements
# - dupBranchBody
# # Detects suspicious duplicated sub-expressions
# - dupSubExpr
# # Detects unwanted dependencies on the evaluation order
# - evalOrder
# # Detects calls to exit/fatal inside functions that use defer
# - exitAfterDefer
# # Detects problems in filepath.Join() function calls
# - filepathJoin
# # Detects immediate dereferencing of `flag` package pointers
# - flagDeref
# # Detects suspicious flag names
# - flagName
# # Detects suspicious map literal keys
# - mapKey
# # Detects return statements those results evaluate to nil
# - nilValReturn
# # Detects octal literals passed to functions
# - octalLiteral
# # Detects various off-by-one kind of errors
# - offBy1
# # Detects suspicious regexp patterns
# - regexpPattern
# # Detects suspicious/confusing re-assignments
# - sloppyReassign
# # Detects suspicious sort.Slice calls
# - sortSlice
# # Detects potential truncation issues when comparing ints of different sizes
# - truncateCmp
# # Detects redundantly deferred calls
# - unnecessaryDefer
# # Detects conditions that are unsafe due to not being exhaustive
# - weakCond
# # Detects assignments that can be simplified by using assignment operators
# - assignOp
# # Detects bool expressions that can be simplified
# - boolExprSimplify
# # Detects capitalized names for local variables
# - captLocal
# # Detects commented-out imports
# - commentedOutImport
# # Detects when default case in switch isn't on 1st or last position
# - defaultCaseOrder
# # Detects deferred function literals that can be simplified
# - deferUnlambda
# # Detects comments that silence go lint complaints about doc-comment
# - docStub
# # Detects multiple imports of the same package under different aliases
# - dupImport
# # Detects fallthrough that can be avoided by using multi case values
# - emptyFallthrough
# # Detects empty string checks that can be written more idiomatically
# - emptyStringTest
# # Detects hex literals that have mixed case letter digits
# - hexLiteral
# # Detects non-assignment statements inside if/switch init clause
# - initClause
# # Detects method expression call that can be replaced with a method call
# - methodExprCall
# # Finds where nesting level could be reduced
# - nestingReduce
# # Detects immediate dereferencing of `new` expressions
# - newDeref
# # Detects if function parameters could be combined by type and suggest the way to do it
# - paramTypeCombine
# # Detects input and output parameters that have a type of pointer to referential type
# - ptrToRefParam
# # Detects `regexp.Compile*` that can be replaced with `regexp.MustCompile*`
# - regexpMust
# # Detects regexp patterns that can be simplified
# - regexpSimplify
# # Runs user-defined rules using ruleguard linter
# # TODO: enable later, to detect coordinator leaks any info at the network boundary
# # - ruleguard
# # Detects usage of `len` when result is obvious or doesn't make sense
# - sloppyLen
# # Detects redundant conversions between string and []byte
# - stringXbytes
# # Detects switch-over-bool statements that use explicit `true` tag value
# - switchTrue
# # Detects repeated type assertions and suggests to replace them with type switch statement
# - typeAssertChain
# # Detects method declarations preceding the type definition itself
# - typeDefFirst
# # Detects type switches that can benefit from type guard clause with variable
# - typeSwitchVar
# # Detects unneded parenthesis inside type expressions and suggests to remove them
# - typeUnparen
# # Detects dereference expressions that can be omitted
# - underef
# # Detects redundant statement labels
# - unlabelStmt
# # Detects function literals that can be simplified
# - unlambda
# # Detects unnecessary braced statement blocks
# - unnecessaryBlock
# # Detects slice expressions that can be simplified to sliced expression itself
# - unslice
# # Detects value swapping code that are not using parallel assignment
# - valSwap
# # Ensures that `//nolint` comments include an explanation
# - whyNoLint
# # Detects function calls that can be replaced with convenience wrappers
# - wrapperFunc
# # Detects Yoda style expressions and suggests to replace them eg. `return nil != ptr to` to `return ptr != nil`
# - yodaStyleExpr
# # Detects `append` chains to the same slice that can be done in a single `append` call
# - appendCombine
# # Detects unoptimal strings/bytes case-insensitive comparison
# - equalFold
# # Detects params that incur excessive amount of copying
# - hugeParam
# # Detects strings.Index calls that may cause unwanted allocs
# - indexAlloc
# # Detects expensive copies of `for` loop range expressions
# - rangeExprCopy
# # Detects loops that copy big objects during each iteration
# - rangeValCopy
# # Settings passed to gocritic.
# # The settings key is the name of a supported gocritic checker.
# # The list of supported checkers can be find in https://go-critic.github.io/overview.
# settings:
# captLocal: # must be valid enabled check name
# # whether to restrict checker to params only (default true)
# paramsOnly: true
# hugeParam:
# # size in bytes that makes the warning trigger (default 80)
# sizeThreshold: 80
# nestingReduce:
# # min number of statements inside a branch to trigger a warning (default 5)
# bodyWidth: 5
# rangeExprCopy:
# # size in bytes that makes the warning trigger (default 512)
# sizeThreshold: 512
# # whether to check test functions (default true)
# skipTestFuncs: true
# rangeValCopy:
# # size in bytes that makes the warning trigger (default 128)
# sizeThreshold: 128
# # whether to check test functions (default true)
# skipTestFuncs: true
# truncateCmp:
# # whether to skip int/uint/uintptr types (default true)
# skipArchDependent: true
# underef:
# # whether to skip (*x).method() calls where x is a pointer receiver (default true)
# skipRecvDeref: false
# wrapcheck:
# ignorePackageGlobs:
# - google.golang.org/grpc/status
# - github.com/coinbase/tex/internal/coordinator/common
issues:
# Excluding configuration per-path, per-linter, per-text and per-source
exclude-rules:
# Exclude some linters from running on tests files.
- path: _test\.go
linters:
# # We're disabling `goconst` in tests, because there are a lot of
# # repeated non-constant strings in the tests that we don't want to
# # change eg. `cosigner id` as the cosigner id etc.
# - goconst
# # We're only enabling `godot` for library and internal code to make documentation
# # generation and/or text-editor's job prettier.
# - godot
# # It's unnecessary to have a hard wrapping requirement for tests.
# - wrapcheck
# # It's unnecessary to enforce errors are not defined dynamically
# - goerr113
# # Ignore revive results for tests because there a lot of conflicting
# # suggestions eg. `var-naming` says we shouldn't use underscores in
# # function names but that's a common pattern that we use.
# - revive
# In tests we disable the check that prevents assigning errors to _
- errcheck
# unnecessary
- dogsled
# duplicate code detection tends to break the subtesting pattern that we use.
- dupl
# unnecessary
- errorlint
# # Exclude some revive messages
# - linters:
# - revive
# text: "var-naming: don't use ALL_CAPS in Go names; use CamelCase"
# # We don't want to check whether statements like `x, err := ...` shadows `err`
# - linters:
# - govet
# text: 'shadow: declaration of "err" shadows declaration at line'
# Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
max-same-issues: 0
# Fix found issues (if it's supported by the linter). To prevent any problems
# with CI, this is disabled. Use `make lint-fix` instead.
fix: false

42
nekryptology/.spdx.yml Normal file
View File

@ -0,0 +1,42 @@
#
# Copyright Coinbase, Inc. All Rights Reserved.
# Copyright Quilibrium, Inc. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
copyright: |
Copyright Coinbase, Inc. All Rights Reserved.
Copyright Quilibrium, Inc. All Rights Reserved.
license: Apache-2.0
comments:
gitignore: '#'
dockerignore: '#'
sh: '#'
py: '#'
pl: '#'
rb: '#'
yml: '#'
yaml: '#'
go: '//'
rs: '//'
ignore:
- 'pkg/signatures/schnorr/mina/bitvector\.go$'
- 'pkg/core/curves/native/bls48581'
- 'testdata/'
- 'test/data'
- 'Cargo\.lock$'
- 'Cargo\.toml$'
- 'target'
- 'go\.mod$'
- 'go\.sum$'
- '\.gitignore$'
- '\.dockerignore$'
- '\.git$'
- '\.idea$'
- '\.vscode$'
- '\bLICENSE$'
- '\w+\.md$'
- '\w+\.pdf$'
- 'Makefile$'
- .DS_Store
- '\w+\.dat$'

110
nekryptology/CHANGELOG.md Normal file
View File

@ -0,0 +1,110 @@
# Changelog
All notable changes to this repo will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## v1.8.0
- BLS12-381 is now constant time.
- BLS signatures use new CT-based version of the BLS12-381 curve.
## v1.7.0
### Fixed
- Fix an issue in the serialization wrapper for DKLs18 and generalizes the choice of the hash function.
## v1.6.1
### Fixed
- Fix an issue inherited from a transcription error in DKLs18, in which the KOS cOT extension subprotocol was implemented incorrectly.
## v1.6.0
### Add
- Constant time K256 curve.
- Constant time P256 curve.
- New DKLs18 implementation that fixes the issues in v0.
- This also refactors OT, OT Extension, and Schnorr ZKP to their own packages.
- Furthermore, this generalizes the input curve type of the protocol.
- Bulletproof IPP Prove and Verification functions.
### Fixed
- nil check in `paillier.Newpubkey`.
- Mark the current DKLs18 implementation as v0 and not suitable for production.
- Establish a new interface for MPC protocols and their messages.
- Fireblocks bit probe attack.
### Removed
- Godropbox dependency
- Autogeneration of readmes
## v1.5.5
- Fix BBS+ error when revealing all messages
- Add Mina signing
- Add NEM signing
## v1.5.4
- Export Value in ElGamal Public Keys
## v1.5.3
- Address Alpha-Rays attack on GG20 DKG https://eprint.iacr.org/2021/1621.pdf
## v1.5.2
- Export Verifiable Encryption ECC ciphertext values
- Update to GO 1.17
## v1.5.1
- Export tBLS signature Value
- Negate the DKLs signature V value
## v1.5.0
- Add BLS12-381 curve
- Add BLS signatures
- Update to always produce DKLS low-s form
## v1.4.1
- Update accumulator implementation to use alias-ing instead of one field structs
- Update accumulator implementation marshaling implementation
## v1.4.0
- Update verifiable encryption API
## v1.3.0
- Add Accumulator
- Update for new curve abstraction
- Update verifiable encryption API
## v1.2.0
- Add Verifiable Encryption
- Add FROST DKG
- Add DKLS threshold signing
- Add curve abstraction
- Pasta Curves: Pallas and Vesta
- BBS+ signatures
## v1.1.0
- Add recovery id to output of tECDSA signatures in Round 6
- Add Neg and Bytes to EcScalar
- Add SubFieldOrder to Field struct
## v1.0.0
### Added
- This document and other meta-information
- tECDSA dealered and distributed key generations
- tECDSA based on [GG20](https://eprint.iacr.org/2020/540.pdf) signing
- Gennaro [DKG07](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.134.6445&rep=rep1&type=pdf) usable for Ed25519 and BLS keys.
- Shamir Secret Sharing
- Feldman Verifiable Secret Sharing
- Pedersen Verifiable Secret Sharing
- Paillier Encryption Scheme

View File

@ -0,0 +1,186 @@
# Contributing to Kryptology
Kryptology is Apache 2.0 licensed and accepts contributions via
GitHub pull requests.
# Ways to contribute to Kryptology
- Bugs or issues: Report problems or defects as github issues
- Features and enhancements: Provide expanded capabilities or optimizations
- Documentation: Improve existing documentation or create new information
- Tests for events and results:
- Functional
- Performance
- Usability
- Security
- Localization
- Recovery
- Deployability
# Code guidelines
Use go style comments for all public functions, structs, and constants.
Export only what is absolutely necessary.
# The Commit Process
When contributing code, please follow these guidelines:
- Fork the repository and make your changes in a feature branch
- Include unit and integration tests for any new features and updates to existing tests
- Ensure that the unit and integration tests run successfully.
- Check that the lint tests pass
## Important
Use `git rebase origin/master` to limit creating merge commits.
Kryptology accepts single commits. If you have more than one, they will be
squashed when merged.
## Commit Email Address
Your commit email address must match your GitHub or GitLab email address. For more information, see https://help.github.com/articles/setting-your-commit-email-address-in-git/.
## Commit messages
Each commit message consists of a header, a body, and a footer.
The header includes a type, a scope and a subject:
```markdown
<type>(<scope>): <subject>
<BLANK LINE>
<body>
<BLANK LINE>
<footer>
```
`<subject>` *required*, must not be longer than 100 characters.
`<type>` *required*, must be lower case and have one of the following values:
- `build:` changes that affect the build system or external dependencies. Kryptology uses buildkite.
- `chore:` some minor change that doesn't fall in any of the other types
- `ci:` changes to the continuous integration configuration files
- `docs:` documentation only change
- `feat:` new feature
- `fix:` a bug fix
`<scope>` *optional*, must be lower case and have a tag about which primitive it affects like 'tecdsa/dkls', 'paillier', 'verenc'.
`<subject>` *required*, must be lower case, must not end in a period, and should describe changes in the imperative mood.
`<body>` *optional*, wrapped at 100 characters, must have a blank line above it, and should describe changes in the imperative mood.
`<footer>` *optional*, must not be longer than 100 characters.
For more information see [here](https://gist.github.com/joshbuchea/6f47e86d2510bce28f8e7f42ae84c716)
**Kryptology requires that all commits are signed by a PGP key.**
## Issues and Bugs
If you find a bug in the source code, help us by submitting an issue to the repository. Even better, submit a Pull Request with a fix.
Before submitting an issue search the archive, maybe your question is already answered.
If the issue is a bug, and hasn't been reported, open a new issue. Help us to maximize the effort we can spend fixing issues and adding new features, by not reporting duplicate issues.
For security bugs see the security [policy](SECURITY.md).
In general, providing the following information will increase the chances of the issue being handled quickly:
**Expected Behavior** - Is there documentation or an example that represents the discrepancy?
**Actual Behavior** - Be sure to explain why this is a bug for you.
**Steps to Reproduce the Problem** - Code snippets and screen shots are always helpful.
**Environment** - What hardware, OS, and versions are you using?
**Suggest a Fix** - if you can't fix the bug yourself, perhaps you can point to what might cause the problem (line of code or commit)
## Referencing
When contributing a new protocol or cryptosystem please include references, page numbers and equations.
Document any deviations from the protocol and include an updated security proof if needed.
## Hashing functions
When using hash functions in protocols, use the following guidelines when choosing the hash based on the required output length:
32-byte output - SHA3-256
\*-byte output - SHAKE-256
Sigma protocols use [Merlin](https://merlin.cool/) transcripts.
## Constant time
We make every effort to make code cryptographically constant time. All contributions to cryptography related code
should be constant time unless explicitly stated and why.
Below are some algorithms for computing constant time operations that can be used and are meant to be examples.
```go
// conditionalMove constant-time-selects into x: when i == 1, x is set to y;
// when i == 0, x is left unchanged. i must be 0 or 1 so that uint64(-i)
// yields an all-zeros or all-ones mask.
func conditionalMove(x, y *[4]uint64, i int) {
b := uint64(-i)
x[0] ^= (x[0] ^ y[0]) & b
x[1] ^= (x[1] ^ y[1]) & b
x[2] ^= (x[2] ^ y[2]) & b
x[3] ^= (x[3] ^ y[3]) & b
}
```
```go
// conditionalNegate negates x if i == 1, otherwise x is untouched.
// Each limb is negated independently ((x ^ b) - b is the limb-wise
// two's complement when b is all-ones); i must be 0 or 1 so that
// uint64(-i) yields an all-zeros or all-ones mask.
func conditionalNegate(x *[4]uint64, i int) {
b := uint64(-i)
x[0] = (x[0] ^ b) - b
x[1] = (x[1] ^ b) - b
x[2] = (x[2] ^ b) - b
x[3] = (x[3] ^ b) - b
}
```
```go
// conditionalAdd computes x+=y if i == 1, otherwise x is untouched.
// Additions are per-limb: carries between limbs are not propagated.
// i must be 0 or 1 so that uint64(-i) yields an all-zeros or all-ones mask.
func conditionalAdd(x, y *[4]uint64, i int) {
b := uint64(-i)
x[0] += y[0] & b
x[1] += y[1] & b
x[2] += y[2] & b
x[3] += y[3] & b
}
```
```go
// conditionalSub computes x-=y if i == 1, otherwise x is untouched.
// Subtractions are per-limb: borrows between limbs are not propagated.
// i must be 0 or 1 so that uint64(-i) yields an all-zeros or all-ones mask.
func conditionalSub(x, y *[4]uint64, i int) {
b := uint64(-i)
x[0] -= y[0] & b
x[1] -= y[1] & b
x[2] -= y[2] & b
x[3] -= y[3] & b
}
```
```go
// isZero returns 1 if x is zero or 0 if non-zero.
// All limbs are OR-ed together; (t | -t) has its sign bit set exactly
// when t != 0, so the arithmetic shift yields -1 for non-zero and 0 for
// zero, which the final +1 maps to 0 and 1 respectively.
func isZero(x *[4]uint64) int {
t := x[0]
t |= x[1]
t |= x[2]
t |= x[3]
return int(((int64(t) | int64(-t)) >> 63) + 1)
}
```
```go
// isNonZero returns 1 if x is non-zero, 0 otherwise.
// All limbs are OR-ed together; (t | -t) has its sign bit set exactly
// when t != 0, so the arithmetic shift yields -1 for non-zero and 0 for
// zero, which the leading negation maps to 1 and 0 respectively.
func isNonZero(x *[4]uint64) int {
t := x[0]
t |= x[1]
t |= x[2]
t |= x[3]
return int(-((int64(t) | int64(-t)) >> 63))
}
```

13
nekryptology/Dockerfile Normal file
View File

@ -0,0 +1,13 @@
FROM golang:1.18 AS builder
# Install gomarkdoc (Go 1.18 dropped `go get`-based installs; use `go install pkg@latest`)
RUN go install github.com/princjef/gomarkdoc/cmd/gomarkdoc@latest
# Install rust and build spdx
COPY . /kryptology
WORKDIR /kryptology
RUN apt update && apt install -y curl
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
RUN /root/.cargo/bin/cargo build --release --manifest-path=./cmd/spdx/Cargo.toml && \
cp ./cmd/spdx/target/release/spdx /usr/bin/ && \
chmod 755 /usr/bin/spdx

201
nekryptology/LICENSE Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

112
nekryptology/Makefile Normal file
View File

@ -0,0 +1,112 @@
.PHONY: all build bench clean cover deflake fmt lint test test-clean test-long
GOENV=GO111MODULE=on
GO=${GOENV} go
COVERAGE_OUT=/tmp/coverage.out
PACKAGE=./...
TEST_CLAUSE= $(if ${TEST}, -run ${TEST})
DOCTOOLS=docker run --rm -v "$$(pwd)":"$$(pwd)" -w "$$(pwd)" doctools:latest
.PHONY: all
all: githooks test build lint fmt deps docs
.PHONY: build
build:
${GO} build ./...
.PHONY: bench
bench:
${GO} test -v -short -bench=. -test.timeout=0 -run=^noTests ./...
.PHONY: clean
clean:
${GO} clean -cache -modcache -i -r
.PHONY: cover
cover: ## compute and display test coverage report
${GO} test -short -coverprofile=${COVERAGE_OUT} ${PACKAGE}
${GO} tool cover -html=${COVERAGE_OUT}
.PHONY: deps
deps: ## Build dockerized autodoc tools
@docker build -t doctools:latest .
.PHONY: docs
docs: ## Apply copyright headers and re-build package-level documents
@${DOCTOOLS} spdx
.PHONY: gen-readme-docs
gen-readme-docs: ## Re-generate package-level README docs
@${DOCTOOLS} gomarkdoc --output '{{.Dir}}/README.md' ./...
.PHONY: deflake
deflake: ## Runs tests many times to detect flakes
${GO} test -count=1000 -short -timeout 0 ${TEST_CLAUSE} ./...
.PHONY: fmt
fmt:
${GO} fmt ./...
.PHONY: githooks
githooks:
git config core.hooksPath .githooks
.PHONY: lint
lint:
${GO} vet ./...
golangci-lint run
.PHONY: lint-fix
lint-fix:
${GO} vet ./...
golangci-lint run --fix
.PHONY: test
test:
${GO} test -v -short ${TEST_CLAUSE} ./...
.PHONY: test-clean
test-clean: ## Clear test cache and force all tests to be rerun
${GO} clean -testcache && ${GO} test -count=1 -short ${TEST_CLAUSE} ./...
.PHONY: test-long
test-long: ## Runs all tests, including long-running tests
${GO} test ${TEST_CLAUSE} ./...
.PHONY: run-dkg-bls
run-dkg-bls: ## Runs test of gennaro dkg w/ BLS signature
${GO} run test/dkg/bls/main.go
.PHONY: run-dkg-ed25519
run-dkg-ed25519: ## Runs test of gennaro dkg w/ ed25519 signature
${GO} run test/dkg/ed25519/main.go
.PHONY: run-frost-dkg-bls
run-frost-dkg-bls: ## Runs test of frost dkg w/ BLS signature
${GO} run test/frost_dkg/bls/main.go
.PHONY: run-frost-dkg-ed25519
run-frost-dkg-ed25519: ## Runs test of frost dkg w/ ed25519 signature
${GO} run test/frost_dkg/ed25519/main.go
.PHONY: run-frost-dkg-ecdsa
run-frost-dkg-ecdsa: ## Runs test of frost dkg w/ ecdsa signature
${GO} run test/frost_dkg/k256/main.go
.PHONY: run-frost-full
run-frost-full: ## Runs test of frost dkg w/ frost signature
${GO} run test/frost_dkg/frost/main.go
.PHONY: run-verenc-elgamal
run-verenc-elgamal: ## Runs test of el-gamal verifiable encryption
${GO} run test/verenc/elgamal/main.go
.PHONY: run-accumulator-ecc
run-accumulator-ecc: ## Runs test of cryptographic accumulator
${GO} run test/accumulator/ecc/main.go
.PHONY: compare-bench
compare-bench: ## Runs bench on master and the current branch and compares the result
bash scripts/perf-comp-local

6
nekryptology/NOTICE Normal file
View File

@ -0,0 +1,6 @@
Quilibrium Nekryptology
Copyright [2021] contributors to Coinbase Kryptology
Copyright [2023] contributors to Quilibrium Nekryptology
This product includes software developed at Coinbase (http://www.coinbase.com/)
and Quilibrium (https://www.quilibrium.com).

96
nekryptology/README.md Normal file
View File

@ -0,0 +1,96 @@
# Nekryptology
Coinbase's advanced cryptography library, brought back from the dead and enhanced
## Differences from Kryptology
- Schnorr proofs are patched to use hash-to-curve, fixes a bug where depending on the curve there may be a small to very large percentage of proofs that are invalid encodings
- Supports BLS48-581 (uses MIRACL-generated implementation, slightly altered to align to curves.Scalar/curves.Point interfaces)
- Enhanced KOS15 implementation to support arbitrary computational and statistical security parameters
- Updated DKLs18 multiplication to use KOS15 enhancements
- OT-based t-of-n multiplication scheme (borrowed from DKLs19)
- Supports <=2048-bit IQC primitives (uses harmony-one's implementation, slightly adjusted to fix vulnerabilities in FS transform)
- Added Wesolowski VDF
- GG20 is removed
## Quickstart
Use the latest version of this library:
```
go get source.quilibrium.com/quilibrium/monorepo/nekryptology
```
## Documentation
Public documentations can be found at https://pkg.go.dev/source.quilibrium.com/quilibrium/monorepo/nekryptology
To access the documentation of the local version, run `godoc -http=:6060` and open
the following url in your browser.
http://localhost:6060/pkg/source.quilibrium.com/quilibrium/monorepo/nekryptology/
## Developer Setup
**Prerequisites**: `golang 1.18`, `make`
```
git clone git@source.quilibrium.com:quilibrium/monorepo/nekryptology.git && make
```
## Components
The following is the list of primitives and protocols that are implemented in this repository.
### Curves
The curve abstraction code can be found at [pkg/core/curves/curve.go](pkg/core/curves/curve.go)
The curves that implement this abstraction are as follows.
- [BLS12377](pkg/core/curves/bls12377_curve.go)
- [BLS12381](pkg/core/curves/bls12381_curve.go)
- [BLS48581](pkg/core/curves/bls48581_curve.go)
- [Ed25519](pkg/core/curves/ed25519_curve.go)
- [Secp256k1](pkg/core/curves/k256_curve.go)
- [P256](pkg/core/curves/p256_curve.go)
- [Pallas](pkg/core/curves/pallas_curve.go)
### IQC
The IQC abstraction code can be found at [pkg/core/iqc/classgroup.go](pkg/core/iqc/classgroup.go).
### Protocols
The generic protocol interface [pkg/core/protocol/protocol.go](pkg/core/protocol/protocol.go).
- [Cryptographic Accumulators](pkg/accumulator)
- [Bulletproof](pkg/bulletproof)
- Oblivious Transfer
- [Verifiable Simplest OT](pkg/ot/base/simplest)
- [KOS OT Extension](pkg/ot/extension/kos)
- Threshold ECDSA Signature
- [DKLs18 - DKG and Signing](pkg/tecdsa/dkls/v1)
- GG20: The authors of GG20 have stated that the protocol is obsolete and should not be used. See [https://eprint.iacr.org/2020/540.pdf](https://eprint.iacr.org/2020/540.pdf).
- [GG20 - DKG](pkg/dkg/gennaro)
- [GG20 - Signing](pkg/tecdsa/gg20)
- Threshold Schnorr Signature
- [FROST threshold signature - DKG](pkg/dkg/frost)
- [FROST threshold signature - Signing](pkg/ted25519/frost)
- [Paillier encryption system](pkg/paillier)
- Secret Sharing Schemes
- [Shamir's secret sharing scheme](pkg/sharing/shamir.go)
- [Pedersen](pkg/sharing/pedersen.go)
- [Feldman](pkg/sharing/feldman.go)
- [Verifiable encryption](pkg/verenc)
- [ZKP Schnorr](pkg/zkp/schnorr)
## Contributing
- [Versioning](https://blog.golang.org/publishing-go-modules): `vMajor.Minor.Patch`
- Major revision indicates breaking API change or significant new features
- Minor revision indicates no API breaking changes and may include significant new features or documentation
- Patch indicates no API breaking changes and may include only fixes
## [References](docs/)
- [[GG20] _One Round Threshold ECDSA with Identifiable Abort._](https://eprint.iacr.org/2020/540.pdf)
- [[specV5] _One Round Threshold ECDSA for Coinbase._](docs/Coinbase_Pseudocode_v5.pdf)
- [[EL20] _Eliding RSA Group Membership Checks._](docs/rsa-membership.pdf) [src](https://www.overleaf.com/project/5f9c3b0624a9a600012037a3)
- [[P99] _Public-Key Cryptosystems Based on Composite Degree Residuosity Classes._](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.112.4035&rep=rep1&type=pdf)

View File

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,5 @@
# benchcomp
This command receives two benchmark files as input and flags the benchmarks that have
degraded by more than a threshold amount. The main goal of this tool is to be used in CI
to check each PR.

View File

@ -0,0 +1,137 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// benchcomp implements a command that receives two benchmark files as input and flags the benchmarks that have
// degraded by more than a threshold amount. The main goal of this tool is to be used in CI
// to check each PR.
package main
import (
	"bytes"
	"flag"
	"fmt"
	"io"
	"io/ioutil"
	"os"

	"github.com/pkg/errors"
	"golang.org/x/tools/benchmark/parse"
)
// THRESHOLD is the maximum allowed regression ratio: a metric from the new
// run may be at most 10% worse than the current (baseline) run before the
// benchmark is flagged as a performance deviation.
const THRESHOLD = 1.1
// main loads both benchmark logs named on the command line and compares
// them, aborting the process with a panic if either step fails.
func main() {
	current, next, err := parseCmdArgs()
	if err != nil {
		panic(err)
	}
	if err = Compare(current, next); err != nil {
		panic(err)
	}
}
// parseCmdArgs parses the -current and -new command-line flags and returns
// readers over the contents of the two benchmark log files, in that order.
//
// Fixes: the flag usage strings said "The patch to the log file"; they now
// correctly read "path". ioutil.ReadFile (deprecated since Go 1.16) is
// replaced by os.ReadFile, and errors are wrapped with %w so callers can
// unwrap the underlying I/O error.
func parseCmdArgs() (io.Reader, io.Reader, error) {
	cFlag := flag.String("current", "current-bench.log", "The path to the log file containing the output of the current benchmark result.")
	nFlag := flag.String("new", "new-bench.log", "The path to the log file containing the output of the new benchmark result.")
	flag.Parse()

	cBytes, err := os.ReadFile(*cFlag)
	if err != nil {
		return nil, nil, fmt.Errorf("reading current log file: %w", err)
	}
	nBytes, err := os.ReadFile(*nFlag)
	if err != nil {
		return nil, nil, fmt.Errorf("reading new log file: %w", err)
	}
	return bytes.NewBuffer(cBytes), bytes.NewBuffer(nBytes), nil
}
// Compare expects two readers which contain the output of two runs of `go test -bench` command and throws an error if
// the performance has degraded by more than `THRESHOLD` amount.
// Compare expects two readers which contain the output of two runs of the
// `go test -bench` command and returns an error if the performance of any
// benchmark present in both runs has degraded by more than THRESHOLD.
//
// Benchmarks that appear in only one of the two runs are skipped: iteration
// is over the current (baseline) set, so an entry absent from the new set is
// a benchmark that was removed or not executed in the new run. (The previous
// comment here incorrectly described these as "new" benchmarks.)
func Compare(currBench, newBench io.Reader) error {
	c, n, err := parseBenchmarks(currBench, newBench)
	if err != nil {
		return errors.Wrap(err, "parsing benchmark outputs")
	}
	perfDeviations := make([]string, 0)
	for bench := range c {
		// Benchmark missing from the new run; nothing to compare.
		if _, ok := n[bench]; !ok {
			continue
		}
		if err := compareBenches(c[bench], n[bench]); err != nil {
			perfDeviations = append(perfDeviations, fmt.Sprintf("%v", err))
		}
	}
	if len(perfDeviations) != 0 {
		return fmt.Errorf("%#v", perfDeviations)
	}
	return nil
}
// parseBenchmarks decodes the textual `go test -bench` output from both
// readers into parse.Sets, returning the current set followed by the new set.
func parseBenchmarks(currBench, newBench io.Reader) (parse.Set, parse.Set, error) {
	current, err := parse.ParseSet(currBench)
	if err != nil {
		return nil, nil, errors.Wrap(err, "parsing current benchmark output")
	}
	next, err := parse.ParseSet(newBench)
	if err != nil {
		return nil, nil, errors.Wrap(err, "parsing new benchmark output")
	}
	return current, next, nil
}
// compareBenches compares two slices of benchmark runs (current vs. new)
// keyed by full benchmark name, and returns an error describing the first
// metric (allocated bytes/op, allocs/op, ns/op, MB/s) whose new value
// exceeds the current value by more than THRESHOLD.
//
// Fix: when the baseline metric is zero, the percentage was new*100/0 and
// printed "+Inf percent"; the percentage is now reported as 0.00 in that
// case while the regression is still flagged.
//
// NOTE(review): MB/s is a throughput metric where higher is better, so
// flagging new > current*THRESHOLD for MBPerS looks inverted — confirm the
// intended direction with the original authors before changing it.
func compareBenches(currB, newB []*parse.Benchmark) error {
	currMap := make(map[string]*parse.Benchmark, len(currB))
	newMap := make(map[string]*parse.Benchmark, len(newB))
	for _, b := range currB {
		// Name is the full benchmark identifier, including sub-benchmark
		// path and -cpu suffix (e.g. "BenchmarkX/case-16").
		currMap[b.Name] = b
	}
	for _, b := range newB {
		newMap[b.Name] = b
	}
	for name, curr := range currMap {
		next, ok := newMap[name]
		if !ok {
			continue
		}
		compare := []struct {
			current float64
			new     float64
		}{
			{current: float64(curr.AllocedBytesPerOp), new: float64(next.AllocedBytesPerOp)},
			{current: float64(curr.AllocsPerOp), new: float64(next.AllocsPerOp)},
			{current: curr.NsPerOp, new: next.NsPerOp},
			{current: curr.MBPerS, new: next.MBPerS},
		}
		for _, m := range compare {
			if m.new <= m.current*THRESHOLD {
				continue
			}
			// Guard against division by zero when the baseline is 0.
			var percent float64
			if m.current != 0 {
				percent = (m.new - m.current) * 100 / m.current
			}
			return fmt.Errorf("benchmark %s exceeded previous benchmark by %0.2f percent. Current: %0.2f, New: %0.2f", name, percent, m.current, m.new)
		}
	}
	return nil
}

View File

@ -0,0 +1,44 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package main
import (
"strings"
"testing"
)
// TestPerformanceDeviation feeds Compare a baseline run and a 10x-slower
// run of the same benchmark and expects a deviation error to be reported.
func TestPerformanceDeviation(t *testing.T) {
	goodComputationBench := "Benchmark_ABC 842688 1396 ns/op"
	badComputationBench := "Benchmark_ABC 842688 13960 ns/op"
	// err is nil only if Compare failed to notice the 10x regression.
	if err := Compare(strings.NewReader(goodComputationBench), strings.NewReader(badComputationBench)); err == nil {
		t.Errorf("Expected performance deviation: [%v], but did not detect any!", err)
	}
}
// TestParsing checks that parseBenchmarks extracts exactly the benchmark
// lines from noisy `go test -bench` output and ignores surrounding garbage.
func TestParsing(t *testing.T) {
	// TODO: the current parser ignores the 3rd and 4th column of data (e.g., the custom benchmarks)
	output := `
garbage data
BenchmarkSigning/Secp256k1_-_5_of_9-16 1 5794642205 ns/op
BenchmarkSign2p-16 2 685590314 ns/op 29319 bytes/sign 16.00 msgs/sign
garbage data
`
	// The same reader content is used for both sides; only the first
	// (current) parsed set is inspected here.
	o, _, err := parseBenchmarks(strings.NewReader(output), strings.NewReader(output))
	if err != nil {
		t.Errorf("Failed to parse test input %v", err)
	}
	if len(o) != 2 {
		t.Errorf("Incorrect output length. Expected 2, got %#v", o)
	}
	if _, ok := o["BenchmarkSigning/Secp256k1_-_5_of_9-16"]; !ok {
		t.Errorf("Did not find BenchmarkSigning/Secp256k1_-_5_of_9-16 in the parsed output")
	}
	if _, ok := o["BenchmarkSign2p-16"]; !ok {
		t.Errorf("Did not find BenchmarkSign2p-16 in the parsed output")
	}
}

3
nekryptology/cmd/spdx/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
Cargo.lock
target/
.idea/

View File

@ -0,0 +1,13 @@
[package]
authors = ["Coinbase Core Cryptography Developers"]
description = "Checks and adds the specified files for the SPDX-License-Identifier and Copyright headers"
edition = "2018"
name = "spdx"
readme = "README.md"
version = "1.0.0"
[dependencies]
regex = "1.5"
serde = { version = "1.0", features = ["serde_derive"] }
serde_yaml = "0.8"
structopt = "0.3"

View File

@ -0,0 +1 @@
1.53.0

View File

@ -0,0 +1,21 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
use std::path::PathBuf;
use structopt::StructOpt;
// Command-line arguments for the spdx tool.
//
// NOTE: plain `//` comments are used deliberately on the fields below —
// `///` doc comments on a StructOpt struct become generated CLI help text,
// which would change the program's observable behavior.
#[derive(Debug, StructOpt)]
pub struct Commands {
    // Path to the YAML configuration file (-c / --config-file).
    #[structopt(short, long, parse(from_os_str), default_value = ".spdx.yml")]
    pub config_file: PathBuf,
    // Copyright line to stamp into headers; overrides the config file.
    #[structopt(long)]
    pub copyright: Option<String>,
    // Regex patterns of paths to skip (-i / --ignore); override the config file.
    #[structopt(short, long)]
    pub ignore: Option<Vec<String>>,
    // SPDX license identifier to stamp; overrides the config file.
    #[structopt(long)]
    pub license: Option<String>,
    // Directory to start scanning; "." resolves to the current working directory.
    #[structopt(name = "DIR", parse(from_os_str), default_value = ".")]
    pub starting_directory: PathBuf,
}

View File

@ -0,0 +1,47 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
/// Runtime configuration for the SPDX header tool, deserializable from the
/// `.spdx.yml` config file (see `read_config`).
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Config {
    /// The name of the copyright owner
    pub copyright: String,
    /// The chosen license
    pub license: String,
    /// File extensions to comments to use
    /// For example
    ///    yml: #
    ///    python: #
    ///    sh: #
    ///    go: //
    ///    rs: ///
    pub comments: HashMap<String, String>,
    /// Ignore files that match these patterns as regex's
    pub ignore: HashSet<String>,
}
impl Default for Config {
    /// Builds the built-in configuration: the Coinbase copyright line, the
    /// Apache-2.0 license, a standard extension-to-comment-token table, and
    /// no ignore patterns.
    fn default() -> Self {
        // Extension => single-line comment token for that file type.
        let pairs = vec![
            ("gitignore", "#"),
            ("dockerignore", "#"),
            ("sh", "#"),
            ("py", "#"),
            ("pl", "#"),
            ("rb", "#"),
            ("yml", "#"),
            ("yaml", "#"),
            ("go", "//"),
            ("rs", "///"),
        ];
        let comments: HashMap<String, String> = pairs
            .into_iter()
            .map(|(ext, tok)| (ext.to_string(), tok.to_string()))
            .collect();

        Config {
            copyright: "Copyright Coinbase, Inc. All Rights Reserved.".to_string(),
            license: "Apache-2.0".to_string(),
            comments,
            ignore: HashSet::new(),
        }
    }
}

View File

@ -0,0 +1,175 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
#![deny(warnings)]
mod commands;
mod config;
use commands::*;
use config::*;
use regex::{Regex, RegexSet};
use std::{
collections::{HashSet, VecDeque},
env,
ffi::OsStr,
fs::{self, File, OpenOptions},
io::{Read, Seek, SeekFrom, Write},
path::{Path, PathBuf},
process::exit,
};
use structopt::StructOpt;
/// Entry point: resolves the configuration, then walks the starting
/// directory breadth-first, stamping SPDX headers into every file whose
/// path does not match an ignore pattern.
fn main() -> Result<(), String> {
    let mut cmd: Commands = Commands::from_args();
    let config = read_config(&cmd)?;
    // The default DIR is "."; replace it with the absolute current
    // working directory before walking.
    if cmd.starting_directory.ends_with(".") && cmd.starting_directory.starts_with(".") {
        cmd.starting_directory = get_proj_root();
    }
    let ignore_patterns = RegexSet::new(config.ignore.iter()).unwrap();
    // BFS queue of paths still to be examined.
    let mut examine = VecDeque::new();
    examine.push_back(cmd.starting_directory);
    while !examine.is_empty() {
        let curdir = examine.pop_front().unwrap();
        if curdir.is_dir() {
            match curdir.to_str() {
                None => {
                    // Non-UTF-8 path; cannot match against the regex set.
                    eprintln!("Unable to determine directory name: {:?}", curdir);
                    continue;
                }
                Some(s) => {
                    if ignore_patterns.is_match(s) {
                        println!("Ignoring {}", s);
                        continue;
                    }
                    println!("Scanning {}", s);
                }
            }
            // Traverse the current directory and subdirectories
            let readdir = fs::read_dir(curdir).map_err(|e| e.to_string())?;
            for path in readdir {
                examine.push_back(path.map_err(|e| e.to_string())?.path());
            }
        } else if curdir.is_file() {
            match curdir.to_str() {
                None => eprintln!("Can't handle file '{:?}'", curdir),
                Some(s) => {
                    if ignore_patterns.is_match(s) {
                        println!("Ignoring {}", s);
                        continue;
                    }
                    process_file(s, &config)?;
                }
            }
        }
        // Paths that are neither dir nor file (e.g. broken symlinks) are
        // silently dropped.
    }
    Ok(())
}
/// Ensures `file_name` carries the copyright/SPDX header: if the file's
/// extension (or bare file name, for extensionless files like `.gitignore`)
/// has a registered comment token and no matching header is already
/// present, the header is prepended to the file in place.
fn process_file(file_name: &str, config: &Config) -> Result<(), String> {
    println!("Processing {}", file_name);
    let mut f = OpenOptions::new()
        .read(true)
        .write(true)
        .open(PathBuf::from(file_name.to_string()))
        .map_err(|e| e.to_string())?;
    let length = f.metadata().map_err(|e| e.to_string())?.len() as usize;
    let mut buffer = String::new();
    let size = f.read_to_string(&mut buffer).map_err(|e| e.to_string())?;
    if size != length {
        return Err("Unable to read entire file contents".to_string());
    }
    // Use the extension to look up the comment token; fall back to the
    // whole file name for dot-files with no extension.
    let suffix = Path::new(file_name).extension().and_then(OsStr::to_str);
    let sfx = match suffix {
        Some(s) => s,
        None => Path::new(file_name)
            .file_name()
            .and_then(OsStr::to_str)
            .unwrap(),
    };
    match config.comments.get(sfx) {
        None => {}
        Some(comment) => {
            // Found, check for header
            // NOTE(review): copyright/license strings are interpolated into
            // the regex unescaped — metacharacters in them would misfire.
            let rx_header = Regex::new(
                format!(
                    r#"{}\s+{}\s+{}\s+{}\s+{}\s+SPDX-License-Identifier:\s+{}\s+{}\s+"#,
                    comment, comment, config.copyright, comment, comment, config.license, comment
                )
                .as_str(),
            )
            .unwrap();
            if rx_header.is_match(&buffer) {
                return Ok(());
            }
            // No header found: rewrite the file as header + original body.
            let header = format!(
                r#"{}
{} {}
{}
{} SPDX-License-Identifier: {}
{}
"#,
                comment, comment, config.copyright, comment, comment, config.license, comment
            );
            f.seek(SeekFrom::Start(0)).map_err(|e| e.to_string())?;
            f.write_all(header.as_bytes()).map_err(|e| e.to_string())?;
            f.write_all(buffer.as_bytes()).map_err(|e| e.to_string())?;
        }
    }
    Ok(())
}
/// Returns the process's current working directory, printing the error and
/// exiting with status 5 if it cannot be determined.
fn get_proj_root() -> PathBuf {
    env::current_dir().unwrap_or_else(|e| {
        eprintln!("{}", e);
        exit(5);
    })
}
/// Builds the effective Config by layering: built-in defaults, then CLI
/// flag overrides, then (when the config file exists) non-empty values from
/// the YAML file — so the file wins over the flags for any field it sets.
fn read_config(cmd: &Commands) -> Result<Config, String> {
    let mut config = Config::default();
    // CLI overrides are applied first.
    match cmd.ignore.as_ref() {
        None => {}
        Some(patterns) => {
            config.ignore = patterns.iter().cloned().collect::<HashSet<_>>();
        }
    }
    match cmd.license.as_ref() {
        None => {}
        Some(s) => config.license = s.clone(),
    }
    match cmd.copyright.as_ref() {
        None => {}
        Some(s) => config.copyright = s.clone(),
    }
    // Missing config file is not an error: defaults + CLI values stand.
    if !cmd.config_file.exists() {
        return Ok(config);
    }
    let mut f = File::open(&cmd.config_file).map_err(|e| e.to_string())?;
    let len = f.metadata().map_err(|e| e.to_string())?.len() as usize;
    let mut bytes = Vec::with_capacity(len);
    let s = f.read_to_end(&mut bytes).map_err(|e| e.to_string())?;
    if s != len {
        return Err("Unable to read entire file contents".to_string());
    }
    let cfg = serde_yaml::from_slice::<Config>(bytes.as_slice()).map_err(|e| e.to_string())?;
    if !cfg.copyright.is_empty() {
        config.copyright = cfg.copyright.clone();
    }
    if !cfg.license.is_empty() {
        config.license = cfg.license.clone();
    }
    if !cfg.ignore.is_empty() {
        config.ignore = cfg.ignore.clone();
    }
    // NOTE(review): unlike the fields above, comments is replaced
    // unconditionally — an empty map in the file wipes the defaults.
    // Confirm whether that is intended.
    config.comments = cfg.comments;
    Ok(config)
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,4 @@
type=routine
risk=low
impact=sev5

Binary file not shown.

View File

28
nekryptology/go.mod Normal file
View File

@ -0,0 +1,28 @@
module source.quilibrium.com/quilibrium/monorepo/nekryptology
go 1.18
require (
filippo.io/edwards25519 v1.0.0-rc.1
git.sr.ht/~sircmpwn/go-bare v0.0.0-20210406120253-ab86bc2846d9
github.com/btcsuite/btcd v0.21.0-beta.0.20201114000516-e9c7a5ac6401
github.com/btcsuite/btcutil v1.0.2
github.com/bwesterb/go-ristretto v1.2.3
github.com/consensys/gnark-crypto v0.5.3
github.com/gtank/merlin v0.1.1
github.com/pkg/errors v0.9.1
github.com/stretchr/testify v1.7.0
golang.org/x/crypto v0.9.0
golang.org/x/sys v0.8.0
golang.org/x/tools v0.1.5
)
require (
github.com/cloudflare/circl v1.3.3
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
)

123
nekryptology/go.sum Normal file
View File

@ -0,0 +1,123 @@
filippo.io/edwards25519 v1.0.0-rc.1 h1:m0VOOB23frXZvAOK44usCgLWvtsxIoMCTBGJZlpmGfU=
filippo.io/edwards25519 v1.0.0-rc.1/go.mod h1:N1IkdkCkiLB6tki+MYJoSx2JTY9NUlxZE7eHn5EwJns=
git.sr.ht/~sircmpwn/getopt v0.0.0-20191230200459-23622cc906b3/go.mod h1:wMEGFFFNuPos7vHmWXfszqImLppbc0wEhh6JBfJIUgw=
git.sr.ht/~sircmpwn/go-bare v0.0.0-20210406120253-ab86bc2846d9 h1:Ahny8Ud1LjVMMAlt8utUFKhhxJtwBAualvsbc/Sk7cE=
git.sr.ht/~sircmpwn/go-bare v0.0.0-20210406120253-ab86bc2846d9/go.mod h1:BVJwbDfVjCjoFiKrhkei6NdGcZYpkDkdyCdg1ukytRA=
github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ=
github.com/btcsuite/btcd v0.21.0-beta.0.20201114000516-e9c7a5ac6401 h1:0tjUthKCaF8zwF9Qg7lfnep0xdo4n8WiFUfQPaMHX6g=
github.com/btcsuite/btcd v0.21.0-beta.0.20201114000516-e9c7a5ac6401/go.mod h1:Sv4JPQ3/M+teHz9Bo5jBpkNcP0x6r7rdihlNL/7tTAs=
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA=
github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg=
github.com/btcsuite/btcutil v1.0.2 h1:9iZ1Terx9fMIOtq1VrwdqfsATL9MC2l8ZrUY6YZ2uts=
github.com/btcsuite/btcutil v1.0.2/go.mod h1:j9HUFwoQRsZL3V4n+qG+CUnEGHOarIxfC3Le2Yhbcts=
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg=
github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY=
github.com/btcsuite/goleveldb v1.0.0/go.mod h1:QiK9vBlgftBg6rWQIj6wFzbPfRjiykIEhBH4obrXJ/I=
github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc=
github.com/btcsuite/snappy-go v1.0.0/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc=
github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY=
github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs=
github.com/bwesterb/go-ristretto v1.2.0 h1:xxWOVbN5m8NNKiSDZXE1jtZvZnC6JSJ9cYFADiZcWtw=
github.com/bwesterb/go-ristretto v1.2.0/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
github.com/bwesterb/go-ristretto v1.2.3 h1:1w53tCkGhCQ5djbat3+MH0BAQ5Kfgbt56UZQ/JMzngw=
github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0=
github.com/cloudflare/circl v1.3.3 h1:fE/Qz0QdIGqeWfnwq0RE0R7MI51s0M2E4Ga9kq5AEMs=
github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA=
github.com/consensys/bavard v0.1.8-0.20210915155054-088da2f7f54a/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI=
github.com/consensys/gnark-crypto v0.5.3 h1:4xLFGZR3NWEH2zy+YzvzHicpToQR8FXFbfLNvpGB+rE=
github.com/consensys/gnark-crypto v0.5.3/go.mod h1:hOdPlWQV1gDLp7faZVeg8Y0iEPFaOUnCc4XeCCk96p0=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/decred/dcrd/lru v1.0.0/go.mod h1:mxKOwFd7lFjN2GZYsiz/ecgqR6kkYAl+0pz0tEMk218=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/gtank/merlin v0.1.1 h1:eQ90iG7K9pOhtereWsmyRJ6RAwcP4tHTDBHXNg+u5is=
github.com/gtank/merlin v0.1.1/go.mod h1:T86dnYJhcGOh5BjZFCJWTDeTK7XW8uE+E21Cy/bIQ+s=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ=
github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4=
github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c=
github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8=
github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643 h1:hLDRPB66XQT/8+wG9WsDpiCvZf1yKO7sz7scAjSlBa0=
github.com/mimoo/StrobeGo v0.0.0-20181016162300-f8f6d4d2b643/go.mod h1:43+3pMjjKimDBf5Kr4ZFNGbLql1zKkbImw+fZbw3geM=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200115085410-6d4e4cb37c7d/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97 h1:/UOmuWzQfxxo9UtlXMwuQU8CMgg1eZXqTRwkSQJWKOI=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20180719180050-a680a1efc54d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210420205809-ac73e9fd8988/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I=
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0 h1:w8ZOecv6NaNa/zC8944JTU3vz4u6Lagfk4RPQxv92NQ=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA=

View File

@ -0,0 +1,22 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package internal
import "fmt"
// Shared sentinel errors returned across the nekryptology primitives;
// callers compare against these directly (or via errors.Is).
var (
	// ErrNotOnCurve indicates a point failed curve-membership validation.
	ErrNotOnCurve = fmt.Errorf("point not on the curve")
	// ErrPointsDistinctCurves indicates an operation mixed points from
	// different curves.
	ErrPointsDistinctCurves = fmt.Errorf("points must be from the same curve")
	// ErrZmMembership indicates a value is not a member of Z_m.
	ErrZmMembership = fmt.Errorf("x ∉ Z_m")
	// ErrResidueOne indicates a residue check expected 1 (mod N).
	ErrResidueOne = fmt.Errorf("value must be 1 (mod N)")
	// ErrNCannotBeZero indicates a zero modulus N was supplied.
	ErrNCannotBeZero = fmt.Errorf("N cannot be 0")
	// ErrNilArguments indicates a nil argument was supplied.
	ErrNilArguments = fmt.Errorf("arguments cannot be nil")
	// ErrZeroValue indicates a zero argument was supplied where disallowed.
	ErrZeroValue = fmt.Errorf("arguments cannot be 0")
	// ErrInvalidRound indicates a protocol round method was called out of order.
	ErrInvalidRound = fmt.Errorf("invalid round method called")
	// ErrIncorrectCount indicates the wrong number of inputs was supplied.
	ErrIncorrectCount = fmt.Errorf("incorrect number of inputs")
	// ErrInvalidJson indicates a JSON payload lacked required fields.
	ErrInvalidJson = fmt.Errorf("json format does not contain the necessary data")
)

View File

@ -0,0 +1,93 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package internal
import (
"bytes"
"crypto/sha256"
"fmt"
"golang.org/x/crypto/hkdf"
)
// Hash computes the HKDF over many values
// iteratively such that each value is hashed separately
// and based on preceding values
//
// The first value is computed as okm_0 = KDF(f || value) where
// f is a byte slice of 32 0xFF
// salt is zero-filled byte slice with length equal to the hash output length
// info is the protocol name
// okm is the 32 byte output
//
// The each subsequent iteration is computed by as okm_i = KDF(f_i || value || okm_{i-1})
// where f_i = 2^b - 1 - i such that there are 0xFF bytes prior to the value.
// f_1 changes the first byte to 0xFE, f_2 to 0xFD. The previous okm is appended to the value
// to provide cryptographic domain separation.
// See https://signal.org/docs/specifications/x3dh/#cryptographic-notation
// and https://signal.org/docs/specifications/xeddsa/#hash-functions
// for more details.
// This uses the KDF function similar to X3DH for each `value`
// But changes the key just like XEdDSA where the prefix bytes change by a single bit
func Hash(info []byte, values ...[]byte) ([]byte, error) {
	// Don't accept any nil arguments
	if anyNil(values...) {
		return nil, ErrNilArguments
	}
	// salt: 32 zero bytes (matches the SHA-256 output length).
	// okm: the running 32-byte output; all zeros for the first iteration.
	salt := make([]byte, 32)
	okm := make([]byte, 32)
	// f is the 0xFF...FF prefix; ByteSub decrements it after each value so
	// every iteration is keyed under a distinct prefix (XEdDSA-style).
	f := bytes.Repeat([]byte{0xFF}, 32)
	for _, b := range values {
		// ikm = f || value || previous okm; chaining the previous output
		// provides cryptographic domain separation between values.
		ikm := append(f, b...)
		ikm = append(ikm, okm...)
		kdf := hkdf.New(sha256.New, ikm, salt, info)
		// Read exactly 32 bytes of output key material into okm.
		n, err := kdf.Read(okm)
		if err != nil {
			return nil, err
		}
		if n != len(okm) {
			return nil, fmt.Errorf("unable to read expected number of bytes want=%v got=%v", len(okm), n)
		}
		// Decrement the prefix for the next value (0xFF... -> 0xFE...).
		ByteSub(f)
	}
	return okm, nil
}
// anyNil reports whether at least one of the supplied byte slices is nil.
// Note that an empty-but-allocated slice is not considered nil.
func anyNil(values ...[]byte) bool {
	for i := range values {
		if values[i] == nil {
			return true
		}
	}
	return false
}
// ByteSub is a constant time algorithm for subtracting
// 1 from the array as if it were a big number.
// 0 is considered a wrap which resets to 0xFF
// The slice is treated little-endian: b[0] is the least significant byte,
// and the borrow propagates upward for as long as bytes hit zero.
func ByteSub(b []byte) {
	// m is the borrow: 1 while we are still subtracting, 0 once done.
	m := byte(1)
	for i := 0; i < len(b); i++ {
		b[i] -= m
		// If b[i] > 0, s == 0
		// If b[i] == 0, s == 1
		// Computing IsZero(b[i]) branch-free: (s1 | s2) is -1 for any
		// non-zero byte and 0 only for zero, so s = (s1|s2)+1 is 1
		// exactly when b[i] == 0. (The previous comment here said
		// "IsNonZero", which was inverted.)
		s1 := int8(b[i]) >> 7
		s2 := -int8(b[i]) >> 7
		s := byte((s1 | s2) + 1)
		// If s == 0, don't subtract anymore
		// s == 1, continue subtracting
		m = s & m
		// If s == 0 this does nothing
		// If s == 1 reset this value to 0xFF (wrap) and keep borrowing
		b[i] |= -s
	}
}

View File

@ -0,0 +1,62 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package internal
import (
"bytes"
"testing"
"github.com/stretchr/testify/require"
)
// TestByteSub walks ByteSub through plain decrements and multi-byte
// borrow propagation, checking the wrap-to-0xFF behavior at each step.
func TestByteSub(t *testing.T) {
    f := bytes.Repeat([]byte{0xFF}, 32)
    ByteSub(f)
    require.Equal(t, f[0], byte(0xFE))
    for i := 1; i < len(f); i++ {
        require.Equal(t, f[i], byte(0xFF))
    }
    ByteSub(f)
    require.Equal(t, f[0], byte(0xFD))
    for i := 1; i < len(f); i++ {
        require.Equal(t, f[i], byte(0xFF))
    }
    f[0] = 0x2
    ByteSub(f)
    // Previously f[0] was never checked here: 0x2 - 1 must yield 0x1
    // with no borrow into the higher bytes.
    require.Equal(t, f[0], byte(0x1))
    for i := 1; i < len(f); i++ {
        require.Equal(t, f[i], byte(0xFF))
    }
    ByteSub(f)
    require.Equal(t, f[0], byte(0xFF))
    require.Equal(t, f[1], byte(0xFE))
    for i := 2; i < len(f); i++ {
        require.Equal(t, f[i], byte(0xFF))
    }
    ByteSub(f)
    require.Equal(t, f[0], byte(0xFE))
    require.Equal(t, f[1], byte(0xFE))
    for i := 2; i < len(f); i++ {
        require.Equal(t, f[i], byte(0xFF))
    }
    f[0] = 1
    f[1] = 1
    ByteSub(f)
    require.Equal(t, f[0], byte(0xFF))
    require.Equal(t, f[1], byte(0xFF))
    require.Equal(t, f[2], byte(0xFE))
    for i := 3; i < len(f); i++ {
        require.Equal(t, f[i], byte(0xFF))
    }
}
// TestByteSubAll1 checks full borrow propagation: subtracting 1 from a
// number whose bytes are all 0x01 wraps every byte to 0xFF.
func TestByteSubAll1(t *testing.T) {
    f := bytes.Repeat([]byte{0x1}, 32)
    ByteSub(f)
    for i := 0; i < len(f); i++ {
        require.Equal(t, f[i], byte(0xFF))
    }
}

View File

@ -0,0 +1,44 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package internal
import (
"crypto/elliptic"
"math/big"
"filippo.io/edwards25519"
)
func CalcFieldSize(curve elliptic.Curve) int {
bits := curve.Params().BitSize
return (bits + 7) / 8
}
// ReverseScalarBytes returns a copy of inBytes in reverse order, e.g. to
// convert a scalar between big-endian and little-endian encodings.
func ReverseScalarBytes(inBytes []byte) []byte {
    n := len(inBytes)
    out := make([]byte, n)
    for i := range inBytes {
        out[n-1-i] = inBytes[i]
    }
    return out
}
// BigInt2Ed25519Point decodes y into an edwards25519 point. The big.Int
// bytes are right-aligned into a 32-byte array and passed to SetBytes
// unmodified.
//
// NOTE(review): big.Int.Bytes() is big-endian while edwards25519.SetBytes
// expects the canonical little-endian point encoding; unlike
// BigInt2Ed25519Scalar, the bytes are NOT reversed here. Callers appear
// to depend on this exact layout — confirm before changing.
func BigInt2Ed25519Point(y *big.Int) (*edwards25519.Point, error) {
    var enc [32]byte
    raw := y.Bytes()
    copy(enc[32-len(raw):], raw)
    return edwards25519.NewIdentityPoint().SetBytes(enc[:])
}
// BigInt2Ed25519Scalar converts x into an edwards25519 scalar. big.Int is
// big-endian while ed25519 assumes little-endian, so the bytes are
// reversed first; SetCanonicalBytes rejects non-canonical encodings.
func BigInt2Ed25519Scalar(x *big.Int) (*edwards25519.Scalar, error) {
    var le [32]byte
    copy(le[:], ReverseScalarBytes(x.Bytes()))
    return edwards25519.NewScalar().SetCanonicalBytes(le[:])
}

View File

@ -0,0 +1,21 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package internal
import (
"math/big"
)
// B10 parses s as a base-10 integer and returns it as a *big.Int. It
// panics on malformed input so that zero values can never silently stand
// in for a bad string.
func B10(s string) *big.Int {
    value := new(big.Int)
    if _, ok := value.SetString(s, 10); !ok {
        panic("Couldn't derive big.Int from string")
    }
    return value
}

View File

@ -0,0 +1,4 @@
# Cryptographic Accumulators
This package implements cryptographic accumulators. At the moment, it contains an implementation of
[Dynamic Universal Accumulator with Batch Update over Bilinear Groups](https://eprint.iacr.org/2020/777.pdf)

View File

@ -0,0 +1,172 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package accumulator implements the cryptographic accumulator as described in https://eprint.iacr.org/2020/777.pdf
// It also implements the zero knowledge proof of knowledge protocol
// described in section 7 of the paper.
// Note: the paper only describes for non-membership witness case, but we don't
// use non-membership witness. We only implement the membership witness case.
package accumulator
import (
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// structMarshal is the shared bare-encoded wire form for this package's
// types: a serialized value plus the name of the curve it belongs to.
type structMarshal struct {
    Value []byte `bare:"value"`
    Curve string `bare:"curve"`
}
// Element is a value accumulated into the set, represented as a scalar.
type Element curves.Scalar

// Coefficient is a point; Update emits one per batch-polynomial
// coefficient for witness updates.
type Coefficient curves.Point

// Accumulator holds the current accumulator value as a curve point.
type Accumulator struct {
    value curves.Point
}
// New creates a new accumulator.
func (acc *Accumulator) New(curve *curves.PairingCurve) (*Accumulator, error) {
    // Supporting non-membership witnesses would require the Accumulator
    // Initialization of section 6 of <https://eprint.iacr.org/2020/777.pdf>,
    // i.e. V0 = prod(y + α) * P for y ∈ Y_V0 with P a generator of G1.
    // Since only membership witnesses are used, the initial accumulator
    // is simply a generator.
    acc.value = curve.Scalar.Point().Generator()
    return acc, nil
}
// WithElements initializes a new accumulator prefilled with entries.
// Each member is assumed to be hashed already. The result is
// V = prod(y + α) * V0 for all y ∈ Y_V.
func (acc *Accumulator) WithElements(curve *curves.PairingCurve, key *SecretKey, m []Element) (*Accumulator, error) {
    if _, err := acc.New(curve); err != nil {
        return nil, err
    }
    product, err := key.BatchAdditions(m)
    if err != nil {
        return nil, err
    }
    acc.value = acc.value.Mul(product)
    return acc, nil
}
// AddElements accumulates a set of elements into the accumulator:
// V' = prod(y + α) * V over all y in m.
func (acc *Accumulator) AddElements(key *SecretKey, m []Element) (*Accumulator, error) {
    if acc.value == nil || key.value == nil {
        return nil, fmt.Errorf("accumulator and secret key should not be nil")
    }
    product, err := key.BatchAdditions(m)
    if err != nil {
        return nil, err
    }
    acc.value = acc.value.Mul(product)
    return acc, nil
}
// Add accumulates a single element into the accumulator:
// V' = (y + alpha) * V.
func (acc *Accumulator) Add(key *SecretKey, e Element) (*Accumulator, error) {
    if acc.value == nil || acc.value.IsIdentity() || key.value == nil || e == nil {
        return nil, fmt.Errorf("accumulator, secret key and element should not be nil")
    }
    factor := e.Add(key.value) // y + alpha
    acc.value = acc.value.Mul(factor)
    return acc, nil
}
// Remove removes a single element from the accumulator if it exists:
// V' = 1/(y+alpha) * V.
func (acc *Accumulator) Remove(key *SecretKey, e Element) (*Accumulator, error) {
    if acc.value == nil || acc.value.IsIdentity() || key.value == nil || e == nil {
        return nil, fmt.Errorf("accumulator, secret key and element should not be nil")
    }
    // 1/(y + alpha); Invert reports an error when the sum is not invertible.
    inv, err := e.Add(key.value).Invert()
    if err != nil {
        return nil, err
    }
    acc.value = acc.value.Mul(inv)
    return acc, nil
}
// Update performs a batch addition and deletion as described on page 7, section 3 in
// https://eprint.iacr.org/2020/777.pdf
// It returns the updated accumulator together with the coefficients that
// witness holders need to refresh their membership witnesses.
func (acc *Accumulator) Update(key *SecretKey, additions []Element, deletions []Element) (*Accumulator, []Coefficient, error) {
    if acc.value == nil || acc.value.IsIdentity() || key.value == nil {
        return nil, nil, fmt.Errorf("accumulator and secret key should not be nil")
    }
    // Compute dA(-alpha) = prod(y + alpha), y in the set of A ⊆ ACC-Y_V
    a, err := key.BatchAdditions(additions)
    if err != nil {
        return nil, nil, err
    }
    // Compute dD(-alpha) = 1/prod(y + alpha), y in the set of D ⊆ Y_V
    d, err := key.BatchDeletions(deletions)
    if err != nil {
        return nil, nil, err
    }
    // dA(-alpha)/dD(-alpha)
    div := a.Mul(d)
    newAcc := acc.value.Mul(div)
    // build an array of coefficients
    elements, err := key.CreateCoefficients(additions, deletions)
    if err != nil {
        return nil, nil, err
    }
    coefficients := make([]Coefficient, len(elements))
    for i := 0; i < len(elements); i++ {
        // NOTE: coefficients are scaled by the PRE-update accumulator
        // value, so acc.value must not be overwritten until after this loop.
        coefficients[i] = acc.value.Mul(elements[i])
    }
    acc.value = newAcc
    return acc, coefficients, nil
}
// MarshalBinary converts Accumulator to bytes: the compressed point plus
// its curve name, bare-encoded.
func (acc Accumulator) MarshalBinary() ([]byte, error) {
    if acc.value == nil {
        return nil, fmt.Errorf("accumulator cannot be nil")
    }
    return bare.Marshal(&structMarshal{
        Value: acc.value.ToAffineCompressed(),
        Curve: acc.value.CurveName(),
    })
}
// UnmarshalBinary sets Accumulator from bytes produced by MarshalBinary.
func (acc *Accumulator) UnmarshalBinary(data []byte) error {
    var tv structMarshal
    if err := bare.Unmarshal(data, &tv); err != nil {
        return err
    }
    curve := curves.GetCurveByName(tv.Curve)
    if curve == nil {
        return fmt.Errorf("invalid curve")
    }
    pt, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Value)
    if err != nil {
        return err
    }
    acc.value = pt
    return nil
}

View File

@ -0,0 +1,178 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestNewAccumulator100 checks that a fresh accumulator starts at the G1
// generator.
func TestNewAccumulator100(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    key, err := new(SecretKey).New(curve, seed[:])
    require.NoError(t, err)
    require.NotNil(t, key)
    acc, err := new(Accumulator).New(curve)
    require.NoError(t, err)
    require.NotNil(t, acc)
    require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestNewAccumulator10K checks accumulator initialization.
// NOTE(review): the body is identical to TestNewAccumulator100 — it does
// not actually accumulate 10K elements; confirm the intended scale test.
func TestNewAccumulator10K(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    key, err := new(SecretKey).New(curve, seed[:])
    require.NoError(t, err)
    require.NotNil(t, key)
    acc, err := new(Accumulator).New(curve)
    require.NoError(t, err)
    require.NotNil(t, acc)
    require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestNewAccumulator10M checks accumulator initialization.
// NOTE(review): aside from the short-mode skip, the body is identical to
// TestNewAccumulator100 — it does not actually accumulate 10M elements.
func TestNewAccumulator10M(t *testing.T) {
    // Initiating 10M values takes time
    if testing.Short() {
        t.Skip("skipping test in short mode.")
    }
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    key, err := new(SecretKey).New(curve, seed[:])
    require.NoError(t, err)
    require.NotNil(t, key)
    acc, err := new(Accumulator).New(curve)
    require.NoError(t, err)
    require.NotNil(t, acc)
    require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestWithElements verifies that prefilling moves the accumulator away
// from both identity and generator, and that removing all elements
// returns it to the generator.
func TestWithElements(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    key, _ := new(SecretKey).New(curve, seed[:])
    element1 := curve.Scalar.Hash([]byte("value1"))
    element2 := curve.Scalar.Hash([]byte("value2"))
    elements := []Element{element1, element2}
    newAcc, err := new(Accumulator).WithElements(curve, key, elements)
    require.NoError(t, err)
    require.NotNil(t, newAcc)
    require.NotEqual(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Identity().ToAffineCompressed())
    require.NotEqual(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
    _, _ = newAcc.Remove(key, element1)
    _, _ = newAcc.Remove(key, element2)
    require.Equal(t, newAcc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAdd verifies that adding a single element moves the accumulator
// away from the generator. Errors from New and Add are now checked
// instead of being discarded while stale `err` values were re-asserted.
func TestAdd(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    key, err := new(SecretKey).New(curve, seed[:])
    require.NoError(t, err)
    require.NotNil(t, key)
    acc := &Accumulator{curve.PointG1.Generator()}
    _, err = acc.New(curve)
    require.NoError(t, err)
    require.NotNil(t, acc)
    element := curve.Scalar.Hash([]byte("value1"))
    require.NotNil(t, element)
    _, err = acc.Add(key, element)
    require.NoError(t, err)
    require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestRemove verifies that removing a previously added element restores
// the accumulator to the generator.
func TestRemove(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    key, err := new(SecretKey).New(curve, seed[:])
    require.NoError(t, err)
    require.NotNil(t, key)
    acc, err := new(Accumulator).New(curve)
    require.NoError(t, err)
    require.NotNil(t, acc)
    require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
    element := curve.Scalar.Hash([]byte("value1"))
    require.NoError(t, err)
    require.NotNil(t, element)
    // add element
    _, _ = acc.Add(key, element)
    require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
    // remove element
    acc, err = acc.Remove(key, element)
    require.NoError(t, err)
    require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAddElements verifies that batch addition moves the accumulator away
// from the generator.
func TestAddElements(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    key, err := new(SecretKey).New(curve, seed[:])
    require.NoError(t, err)
    require.NotNil(t, key)
    acc := &Accumulator{curve.PointG1.Generator()}
    _, _ = acc.New(curve)
    require.NoError(t, err)
    require.NotNil(t, acc)
    require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
    element1 := curve.Scalar.Hash([]byte("value1"))
    element2 := curve.Scalar.Hash([]byte("value2"))
    element3 := curve.Scalar.Hash([]byte("value3"))
    elements := []Element{element1, element2, element3}
    acc, err = acc.AddElements(key, elements)
    require.NoError(t, err)
    require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}
// TestAccumulatorMarshal round-trips an accumulator through
// MarshalBinary/UnmarshalBinary and checks that an empty accumulator
// fails to marshal. The UnmarshalBinary error is now checked instead of
// being discarded.
func TestAccumulatorMarshal(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    point := curve.PointG1.Generator().Mul(curve.Scalar.New(2))
    data, err := Accumulator{point}.MarshalBinary()
    require.NoError(t, err)
    require.NotNil(t, data)
    // element cannot be empty
    _, err = Accumulator{}.MarshalBinary()
    require.Error(t, err)
    e := &Accumulator{curve.PointG1.Generator()}
    err = e.UnmarshalBinary(data)
    require.NoError(t, err)
    require.True(t, e.value.Equal(point))
}
// TestUpdate verifies that a batch addition followed by a batch deletion
// of the same elements returns the accumulator to the generator.
func TestUpdate(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    key, err := new(SecretKey).New(curve, seed[:])
    require.NoError(t, err)
    require.NotNil(t, key)
    acc, err := new(Accumulator).New(curve)
    require.NoError(t, err)
    require.NotNil(t, acc)
    require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
    element1 := curve.Scalar.Hash([]byte("value1"))
    element2 := curve.Scalar.Hash([]byte("value2"))
    element3 := curve.Scalar.Hash([]byte("value3"))
    elements := []Element{element1, element2, element3}
    acc, _, err = acc.Update(key, elements, nil)
    require.NoError(t, err)
    require.NotEqual(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
    acc, _, err = acc.Update(key, nil, elements)
    require.NoError(t, err)
    require.Equal(t, acc.value.ToAffineCompressed(), curve.PointG1.Generator().ToAffineCompressed())
}

View File

@ -0,0 +1,246 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// SecretKey is the secret alpha only held by the accumulator manager.
// It is used to add and remove elements and to create the batch-update
// coefficients.
type SecretKey struct {
    value curves.Scalar
}
// New creates a new secret key by hashing the seed to a scalar.
// A nil curve previously caused a nil-pointer panic; it now returns an
// error, consistent with the nil check in GetPublicKey.
func (sk *SecretKey) New(curve *curves.PairingCurve, seed []byte) (*SecretKey, error) {
    if curve == nil {
        return nil, fmt.Errorf("curve cannot be nil")
    }
    sk.value = curve.Scalar.Hash(seed)
    return sk, nil
}
// GetPublicKey creates a public key from SecretKey sk: the generator of
// the other pairing group multiplied by sk.
func (sk SecretKey) GetPublicKey(curve *curves.PairingCurve) (*PublicKey, error) {
    if sk.value == nil || curve == nil {
        return nil, fmt.Errorf("curve and sk value cannot be nil")
    }
    generator := curve.Scalar.Point().(curves.PairingPoint).OtherGroup().Generator()
    pub := generator.Mul(sk.value)
    return &PublicKey{pub.(curves.PairingPoint)}, nil
}
// MarshalBinary converts SecretKey to bytes: the scalar bytes plus its
// curve name, bare-encoded.
func (sk SecretKey) MarshalBinary() ([]byte, error) {
    if sk.value == nil {
        return nil, fmt.Errorf("sk cannot be empty")
    }
    return bare.Marshal(&structMarshal{
        Value: sk.value.Bytes(),
        Curve: sk.value.Point().CurveName(),
    })
}
// UnmarshalBinary sets SecretKey from bytes produced by MarshalBinary.
func (sk *SecretKey) UnmarshalBinary(data []byte) error {
    var tv structMarshal
    if err := bare.Unmarshal(data, &tv); err != nil {
        return err
    }
    curve := curves.GetCurveByName(tv.Curve)
    if curve == nil {
        return fmt.Errorf("invalid curve")
    }
    scalar, err := curve.NewScalar().SetBytes(tv.Value)
    if err != nil {
        return err
    }
    sk.value = scalar
    return nil
}
// BatchAdditions computes prod(y + alpha) over all y in additions and
// returns the product.
func (sk SecretKey) BatchAdditions(additions []Element) (Element, error) {
    if sk.value == nil {
        return nil, fmt.Errorf("secret key cannot be empty")
    }
    product := sk.value.One()
    for _, y := range additions {
        if y == nil {
            return nil, fmt.Errorf("some element in additions is nil")
        }
        // product *= (y + alpha)
        product = product.Mul(y.Add(sk.value))
    }
    return product, nil
}
// BatchDeletions computes 1/prod(y + alpha) over all y in deletions and
// returns it.
func (sk SecretKey) BatchDeletions(deletions []Element) (Element, error) {
    product, err := sk.BatchAdditions(deletions)
    if err != nil {
        return nil, err
    }
    inv, err := product.Invert()
    if err != nil {
        return nil, err
    }
    return inv, nil
}
// CreateCoefficients creates the Batch Polynomial coefficients
// See page 7 of https://eprint.iacr.org/2020/777.pdf
//
// It builds vA(x) - vD(x) as a polynomial in x and returns its scalar
// coefficients (lowest degree first); Update then scales each one by the
// pre-update accumulator to produce the witness-update data.
func (sk SecretKey) CreateCoefficients(additions []Element, deletions []Element) ([]Element, error) {
    if sk.value == nil {
        return nil, fmt.Errorf("secret key should not be nil")
    }
    // vD(x) = ∑^{m}_{s=1}{ ∏ 1..s {yD_i + alpha}^-1 ∏ 1 ..s-1 {yD_j - x}
    one := sk.value.One()
    m1 := one.Neg() // m1 is -1
    vD := make(polynomial, 0, len(deletions))
    for s := 0; s < len(deletions); s++ {
        // ∏ 1..s (yD_i + alpha)^-1
        c, err := sk.BatchDeletions(deletions[0 : s+1])
        if err != nil {
            return nil, fmt.Errorf("error in sk batchDeletions")
        }
        // Start from the constant polynomial 1 and multiply in each
        // linear factor (yD_j - x).
        poly := make(polynomial, 1, s+2)
        poly[0] = one
        // ∏ 1..(s-1) (yD_j - x)
        for j := 0; j < s; j++ {
            t := make(polynomial, 2)
            // yD_j
            t[0] = deletions[j]
            // -x
            t[1] = m1
            // polynomial multiplication (yD_1-x) * (yD_2 - x) ...
            poly, err = poly.Mul(t)
            if err != nil {
                return nil, err
            }
        }
        poly, err = poly.MulScalar(c)
        if err != nil {
            return nil, err
        }
        vD, err = vD.Add(poly)
        if err != nil {
            return nil, err
        }
    }
    //vD(x) * ∏ 1..n (yA_i + alpha)
    bAdd, err := sk.BatchAdditions(additions)
    if err != nil {
        return nil, fmt.Errorf("error in sk batchAdditions")
    }
    vD, err = vD.MulScalar(bAdd)
    if err != nil {
        return nil, err
    }
    // vA(x) = ∑^n_{s=1}{ ∏ 1..s-1 {yA_i + alpha} ∏ s+1..n {yA_j - x} }
    vA := make(polynomial, 0, len(additions))
    for s := 0; s < len(additions); s++ {
        // ∏ 1..s-1 {yA_i + alpha}; the empty product for s == 0 is 1.
        var c Element
        if s == 0 {
            c = one
        } else {
            c, err = sk.BatchAdditions(additions[0:s])
            if err != nil {
                return nil, err
            }
        }
        poly := make(polynomial, 1, s+2)
        poly[0] = one
        // ∏ s+1..n {yA_j - x}
        for j := s + 1; j < len(additions); j++ {
            t := make(polynomial, 2)
            t[0] = additions[j]
            t[1] = m1
            // polynomial multiplication (yA_1-x) * (yA_2 - x) ...
            poly, err = poly.Mul(t)
            if err != nil {
                return nil, err
            }
        }
        poly, err = poly.MulScalar(c)
        if err != nil {
            return nil, err
        }
        vA, err = vA.Add(poly)
        if err != nil {
            return nil, err
        }
    }
    // vA - vD
    vA, err = vA.Sub(vD)
    if err != nil {
        return nil, err
    }
    result := make([]Element, len(vA))
    for i := 0; i < len(vA); i++ {
        result[i] = vA[i]
    }
    return result, nil
}
// PublicKey is the public key of the accumulator; it is sk * generator
// of G2 (the group other than the accumulator's).
type PublicKey struct {
    value curves.PairingPoint
}
// MarshalBinary converts PublicKey to bytes: the compressed point plus
// its curve name, bare-encoded.
func (pk PublicKey) MarshalBinary() ([]byte, error) {
    if pk.value == nil {
        return nil, fmt.Errorf("public key cannot be nil")
    }
    return bare.Marshal(&structMarshal{
        Value: pk.value.ToAffineCompressed(),
        Curve: pk.value.CurveName(),
    })
}
// UnmarshalBinary sets PublicKey from bytes produced by MarshalBinary.
func (pk *PublicKey) UnmarshalBinary(data []byte) error {
    var tv structMarshal
    if err := bare.Unmarshal(data, &tv); err != nil {
        return err
    }
    curve := curves.GetPairingCurveByName(tv.Curve)
    if curve == nil {
        return fmt.Errorf("invalid curve")
    }
    pt, err := curve.NewScalar().Point().FromAffineCompressed(tv.Value)
    if err != nil {
        return err
    }
    var ok bool
    pk.value, ok = pt.(curves.PairingPoint)
    if !ok {
        return errors.New("can't convert to PairingPoint")
    }
    return nil
}

View File

@ -0,0 +1,88 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestSecretKeyMarshal round-trips a secret key through
// MarshalBinary/UnmarshalBinary and checks the empty-key error path.
func TestSecretKeyMarshal(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    data, err := SecretKey{curve.Scalar.One()}.MarshalBinary()
    require.NoError(t, err)
    require.NotNil(t, data)
    e := &SecretKey{curve.Scalar.New(2)}
    err = e.UnmarshalBinary(data)
    require.NoError(t, err)
    require.Equal(t, e.value.Bytes(), curve.Scalar.One().Bytes())
    // element cannot be empty
    _, err = SecretKey{}.MarshalBinary()
    require.Error(t, err)
}
// TestPublicKeyMarshal round-trips a public key through
// MarshalBinary/UnmarshalBinary. The GetPublicKey error is now checked
// instead of being discarded.
func TestPublicKeyMarshal(t *testing.T) {
    // Actually test both toBytes() and from()
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    sk := &SecretKey{curve.Scalar.New(3)}
    pk, err := sk.GetPublicKey(curve)
    require.NoError(t, err)
    pkBytes, err := pk.MarshalBinary()
    require.NoError(t, err)
    require.NotNil(t, pkBytes)
    pk2 := &PublicKey{}
    err = pk2.UnmarshalBinary(pkBytes)
    require.NoError(t, err)
    require.True(t, pk.value.Equal(pk2.value))
}
// TestBatch verifies that BatchAdditions and BatchDeletions over the same
// elements are multiplicative inverses, both as scalars and when applied
// to a point.
func TestBatch(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    var seed [32]byte
    sk, _ := new(SecretKey).New(curve, seed[:])
    element1 := curve.Scalar.Hash([]byte("value1"))
    element2 := curve.Scalar.Hash([]byte("value2"))
    elements := []Element{element1, element2}
    add, err := sk.BatchAdditions(elements)
    require.NoError(t, err)
    require.NotNil(t, add)
    del, err := sk.BatchDeletions(elements)
    require.NoError(t, err)
    require.NotNil(t, del)
    result := add.Mul(del)
    require.Equal(t, result, curve.Scalar.One())
    g1 := curve.PointG1.Generator()
    acc := g1.Mul(add)
    require.NotEqual(t, acc, g1)
    acc = acc.Mul(del)
    require.Equal(t, acc.ToAffineCompressed(), g1.ToAffineCompressed())
    acc2 := g1.Mul(result)
    require.True(t, acc2.Equal(g1))
}
// TestCoefficient checks that CreateCoefficients for 2 additions and 3
// deletions yields a degree-2 polynomial (3 coefficients).
func TestCoefficient(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
    element1 := curve.Scalar.Hash([]byte("value1"))
    element2 := curve.Scalar.Hash([]byte("value2"))
    element3 := curve.Scalar.Hash([]byte("value3"))
    element4 := curve.Scalar.Hash([]byte("value4"))
    element5 := curve.Scalar.Hash([]byte("value5"))
    elements := []Element{element1, element2, element3, element4, element5}
    coefficients, err := sk.CreateCoefficients(elements[0:2], elements[2:5])
    require.NoError(t, err)
    require.Equal(t, len(coefficients), 3)
}

View File

@ -0,0 +1,204 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"fmt"
"math"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// dad evaluates the characteristic polynomial prod(y_t - y) over values,
// used for both dA(y) and dD(y):
//
//	dA(y) = prod(y_A,t - y), t = 1...n
//	dD(y) = prod(y_D,t - y), t = 1...n
func dad(values []Element, y Element) (Element, error) {
    if values == nil || y == nil {
        return nil, fmt.Errorf("curve, values or y should not be nil")
    }
    for _, v := range values {
        if v == nil {
            return nil, fmt.Errorf("some element is nil")
        }
    }
    result := y.One()
    if len(values) == 1 {
        // Single factor: the product is just (value - y).
        result = values[0].Sub(y)
    } else {
        for _, v := range values {
            result = result.Mul(v.Sub(y))
        }
    }
    return result, nil
}
// polynomialPoint is a polynomial whose coefficients are curve points,
// ordered lowest degree first.
type polynomialPoint []curves.Point
// evaluate evaluates a PolynomialG1 on input x, returning
// p[0] + p[1]*x + p[2]*x^2 + ...
func (p polynomialPoint) evaluate(x curves.Scalar) (curves.Point, error) {
    // Guard len(p) == 0, not just p == nil: an empty non-nil polynomial
    // would previously panic at p[0] below.
    if len(p) == 0 {
        return nil, fmt.Errorf("p cannot be empty")
    }
    for i := 0; i < len(p); i++ {
        if p[i] == nil {
            return nil, fmt.Errorf("some coefficient in p is nil")
        }
    }
    // Horner-free accumulation: pp tracks x^i while summing p[i]*x^i.
    pp := x
    res := p[0]
    for i := 1; i < len(p); i++ {
        r := p[i].Mul(pp)
        res = res.Add(r)
        pp = pp.Mul(x)
    }
    return res, nil
}
// Add adds two PolynomialG1 coefficient-wise; the shorter operand is
// treated as zero-padded at the high-degree end.
func (p polynomialPoint) Add(rhs polynomialPoint) (polynomialPoint, error) {
    maxLen := int(math.Max(float64(len(p)), float64(len(rhs))))
    sum := make(polynomialPoint, maxLen)
    for i, coeff := range p {
        if coeff == nil {
            return nil, fmt.Errorf("invalid coefficient at %d", i)
        }
        // Adding the identity clones the point.
        sum[i] = coeff.Add(coeff.Identity())
    }
    for i, coeff := range rhs {
        if coeff == nil {
            return nil, fmt.Errorf("invalid coefficient at %d", i)
        }
        if sum[i] == nil {
            sum[i] = coeff.Add(coeff.Identity())
        } else {
            sum[i] = sum[i].Add(coeff)
        }
    }
    return sum, nil
}
// Mul for PolynomialG1 computes rhs * p, scaling every point coefficient
// by the scalar rhs.
func (p polynomialPoint) Mul(rhs curves.Scalar) (polynomialPoint, error) {
    scaled := make(polynomialPoint, len(p))
    for i, coeff := range p {
        if coeff == nil {
            return nil, fmt.Errorf("invalid coefficient at %d", i)
        }
        scaled[i] = coeff.Mul(rhs)
    }
    return scaled, nil
}
// polynomial is a polynomial with scalar coefficients, ordered lowest
// degree first.
type polynomial []curves.Scalar
// Add adds two polynomials coefficient-wise; the shorter operand is
// treated as zero-padded at the high-degree end.
func (p polynomial) Add(rhs polynomial) (polynomial, error) {
    maxLen := int(math.Max(float64(len(p)), float64(len(rhs))))
    sum := make([]curves.Scalar, maxLen)
    for i, coeff := range p {
        if coeff == nil {
            return nil, fmt.Errorf("invalid coefficient at %d", i)
        }
        sum[i] = coeff.Clone()
    }
    for i, coeff := range rhs {
        if coeff == nil {
            return nil, fmt.Errorf("invalid coefficient at %d", i)
        }
        if sum[i] == nil {
            sum[i] = coeff.Clone()
        } else {
            sum[i] = sum[i].Add(coeff)
        }
    }
    return sum, nil
}
// Sub computes p - rhs coefficient-wise; high-degree coefficients present
// only in rhs come out negated.
func (p polynomial) Sub(rhs polynomial) (polynomial, error) {
    maxLen := int(math.Max(float64(len(p)), float64(len(rhs))))
    diff := make([]curves.Scalar, maxLen)
    for i, coeff := range p {
        if coeff == nil {
            return nil, fmt.Errorf("invalid coefficient at %d", i)
        }
        diff[i] = coeff.Clone()
    }
    for i, coeff := range rhs {
        if coeff == nil {
            return nil, fmt.Errorf("invalid coefficient at %d", i)
        }
        if diff[i] == nil {
            diff[i] = coeff.Neg()
        } else {
            diff[i] = diff[i].Sub(coeff)
        }
    }
    return diff, nil
}
// Mul multiplies two polynomials - p * rhs - by convolving their
// coefficients.
func (p polynomial) Mul(rhs polynomial) (polynomial, error) {
    // Check for each coefficient that should not be nil
    for i, c := range p {
        if c == nil {
            return nil, fmt.Errorf("coefficient in p at %d is nil", i)
        }
    }
    for i, c := range rhs {
        if c == nil {
            return nil, fmt.Errorf("coefficient in rhs at %d is nil", i)
        }
    }
    m := len(p)
    n := len(rhs)
    // Guard empty operands: make(polynomial, m+n-1) panics with a
    // negative length when both are empty, and p[0].Zero() panics when
    // only p is. An empty polynomial is the zero polynomial, so the
    // product is empty as well.
    if m == 0 || n == 0 {
        return polynomial{}, nil
    }
    // Initialize the product polynomial with zeros.
    prod := make(polynomial, m+n-1)
    for i := 0; i < len(prod); i++ {
        prod[i] = p[0].Zero()
    }
    // Multiply two polynomials term by term.
    for i, cp := range p {
        for j, cr := range rhs {
            temp := cp.Mul(cr)
            prod[i+j] = prod[i+j].Add(temp)
        }
    }
    return prod, nil
}
// MulScalar computes p * rhs, where rhs is a scalar value applied to
// every coefficient.
func (p polynomial) MulScalar(rhs curves.Scalar) (polynomial, error) {
    scaled := make(polynomial, len(p))
    for i, coeff := range p {
        if coeff == nil {
            return nil, fmt.Errorf("coefficient at %d is nil", i)
        }
        scaled[i] = coeff.Mul(rhs)
    }
    return scaled, nil
}

View File

@ -0,0 +1,405 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestEvaluatePolyG1 evaluates (3 + 2x + x^2)*G at x=1 (6*G) and x=2 (11*G).
func TestEvaluatePolyG1(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    poly := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(3)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
    }
    output1, err := poly.evaluate(curve.Scalar.New(1))
    require.NoError(t, err)
    require.NotNil(t, output1)
    result1 := curve.PointG1.Generator().Mul(curve.Scalar.New(6))
    require.Equal(t, output1.ToAffineCompressed(), result1.ToAffineCompressed())
    output2, err := poly.evaluate(curve.Scalar.New(2))
    require.NoError(t, err)
    require.NotNil(t, output2)
    result2 := curve.PointG1.Generator().Mul(curve.Scalar.New(11))
    require.Equal(t, output2.ToAffineCompressed(), result2.ToAffineCompressed())
}
// TestEvaluatePolyG1Error checks that a nil coefficient makes evaluate fail.
func TestEvaluatePolyG1Error(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    poly := polynomialPoint{
        nil,
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
    }
    _, err := poly.evaluate(curve.Scalar.New(1))
    require.Error(t, err)
}
// TestAddAssignPolyG1 checks point-polynomial addition for equal lengths,
// unequal lengths (zero-padding), and an operand built with spare capacity.
func TestAddAssignPolyG1(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    // Test polynomial with equal length
    poly1 := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(3)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
    }
    poly2 := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(3)),
    }
    output, err := poly1.Add(poly2)
    require.NoError(t, err)
    require.NotNil(t, output)
    result := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
    }
    for i := 0; i < len(output); i++ {
        require.Equal(t, output[i].ToAffineCompressed(), result[i].ToAffineCompressed())
    }
    // Test polynomials with unequal length
    poly3 := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
    }
    output2, err := poly1.Add(poly3)
    require.NoError(t, err)
    require.NotNil(t, output2)
    result2 := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(4)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
    }
    require.Equal(t, len(output2), len(result2))
    for i := 0; i < len(output2); i++ {
        require.Equal(t, output2[i].ToAffineCompressed(), result2[i].ToAffineCompressed())
    }
    // Test polynomial with Capacity
    poly4 := make(polynomialPoint, 0, 3)
    poly5, err := poly4.Add(poly1)
    require.NoError(t, err)
    require.Equal(t, len(poly5), len(poly1))
    for i := 0; i < len(poly5); i++ {
        require.Equal(t, poly5[i].ToAffineCompressed(), poly1[i].ToAffineCompressed())
    }
}
// TestAddAssignPolyG1Error checks that a nil coefficient makes Add fail.
func TestAddAssignPolyG1Error(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    poly1 := polynomialPoint{
        nil,
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
    }
    poly2 := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(3)),
    }
    output, err := poly1.Add(poly2)
    require.Error(t, err)
    require.Nil(t, output)
}
// TestMulAssignPolyG1 checks scalar multiplication of a point polynomial.
func TestMulAssignPolyG1(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    poly := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(3)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
    }
    rhs := curve.Scalar.New(3)
    output, err := poly.Mul(rhs)
    require.NoError(t, err)
    require.NotNil(t, output)
    poly2 := polynomialPoint{
        curve.PointG1.Generator().Mul(curve.Scalar.New(9)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(6)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(3)),
    }
    for i := 0; i < len(poly2); i++ {
        require.Equal(t, output[i].ToAffineCompressed(), poly2[i].ToAffineCompressed())
    }
}
// TestMulAssignPolyG1Error checks that a nil coefficient makes Mul fail.
func TestMulAssignPolyG1Error(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    poly := polynomialPoint{
        nil,
        curve.PointG1.Generator().Mul(curve.Scalar.New(2)),
        curve.PointG1.Generator().Mul(curve.Scalar.New(1)),
    }
    rhs := curve.Scalar.New(3)
    output, err := poly.Mul(rhs)
    require.Error(t, err)
    require.Nil(t, output)
}
// TestPushPoly checks that appending scalars to a polynomial (including
// an empty one) places them at the expected positions.
func TestPushPoly(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    poly := polynomial{
        curve.Scalar.New(3),
        curve.Scalar.New(2),
        curve.Scalar.New(1),
    }
    scalar := curve.Scalar.New(4)
    result := append(poly, scalar)
    require.Equal(t, result[3], scalar)
    // Push one more
    scalar2 := curve.Scalar.New(5)
    result2 := append(result, scalar2)
    require.Equal(t, result2[4], scalar2)
    // Push to a new polynomial
    newPoly := polynomial{}
    newPoly = append(newPoly, scalar)
    require.Equal(t, newPoly[0], scalar)
    newPoly = append(newPoly, scalar2)
    require.Equal(t, newPoly[1], scalar2)
}
// TestAddAssignPoly checks scalar-polynomial addition for equal and
// unequal lengths (zero-padding on the shorter operand).
func TestAddAssignPoly(t *testing.T) {
    curve := curves.BLS12381(&curves.PointBls12381G1{})
    // Test polynomial with equal length
    poly1 := polynomial{
        curve.Scalar.New(3),
        curve.Scalar.New(2),
        curve.Scalar.New(1),
    }
    poly2 := polynomial{
        curve.Scalar.New(1),
        curve.Scalar.New(2),
        curve.Scalar.New(3),
    }
    output, err := poly1.Add(poly2)
    require.NoError(t, err)
    require.NotNil(t, output)
    result := []curves.Scalar{
        curve.Scalar.New(4),
        curve.Scalar.New(4),
        curve.Scalar.New(4),
    }
    for i := 0; i < len(output); i++ {
        require.Equal(t, output[i], result[i])
    }
    // Test polynomials with unequal length
    poly3 := polynomial{
        curve.Scalar.New(1),
        curve.Scalar.New(2),
    }
    output2, err := poly1.Add(poly3)
    require.NoError(t, err)
    require.NotNil(t, output2)
    result2 := []curves.Scalar{
        curve.Scalar.New(4),
        curve.Scalar.New(4),
        curve.Scalar.New(1),
    }
    require.Equal(t, len(output2), len(result2))
    for i := 0; i < len(output2); i++ {
        require.Equal(t, output2[i], result2[i])
    }
}
// TestAddAssignPolyError checks that Add fails when a coefficient is nil.
func TestAddAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	// A nil coefficient in the left operand must make Add fail.
	lhs := polynomial{
		nil,
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	rhs := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	sum, err := lhs.Add(rhs)
	require.Error(t, err)
	require.Nil(t, sum)
}
// TestSubAssignPoly exercises polynomial subtraction for equal-length and
// unequal-length operands.
func TestSubAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	// Equal-length operands.
	lhs := polynomial{
		curve.Scalar.New(3),
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	rhs := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	diff, err := lhs.Sub(rhs)
	require.NoError(t, err)
	require.NotNil(t, diff)
	expected := []curves.Scalar{
		curve.Scalar.New(2),
		curve.Scalar.New(0),
		curve.Scalar.New(-2),
	}
	for i := range diff {
		require.Equal(t, diff[i].Bytes(), expected[i].Bytes())
	}
	// Longer right-hand operand: its extra coefficients are negated.
	longer := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
		curve.Scalar.New(4),
	}
	diff2, err := lhs.Sub(longer)
	require.NoError(t, err)
	require.NotNil(t, diff2)
	expected2 := []curves.Scalar{
		curve.Scalar.New(2),
		curve.Scalar.New(0),
		curve.Scalar.New(-2),
		curve.Scalar.New(-4),
	}
	require.Equal(t, len(diff2), len(expected2))
	for i := range diff2 {
		require.Equal(t, diff2[i].Bytes(), expected2[i].Bytes())
	}
}
// TestSubAssignPolyError checks that Sub fails when a coefficient is nil.
func TestSubAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	lhs := polynomial{
		nil,
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	rhs := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	diff, err := lhs.Sub(rhs)
	require.Error(t, err)
	require.Nil(t, diff)
}
// TestMulAssignPoly exercises polynomial multiplication for equal-length
// and unequal-length operands.
func TestMulAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	// Equal-length operands: (3 + 2x + x^2)(1 + 2x + 3x^2).
	lhs := polynomial{
		curve.Scalar.New(3),
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	rhs := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	product, err := lhs.Mul(rhs)
	require.NoError(t, err)
	require.NotNil(t, product)
	expected := []curves.Scalar{
		curve.Scalar.New(3),
		curve.Scalar.New(8),
		curve.Scalar.New(14),
		curve.Scalar.New(8),
		curve.Scalar.New(3),
	}
	for i := range expected {
		require.Equal(t, product[i].Bytes(), expected[i].Bytes())
	}
	// Shorter right-hand operand: (3 + 2x + x^2)(1 + 2x).
	shorter := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
	}
	product2, err := lhs.Mul(shorter)
	require.NoError(t, err)
	require.NotNil(t, product2)
	expected2 := []curves.Scalar{
		curve.Scalar.New(3),
		curve.Scalar.New(8),
		curve.Scalar.New(5),
		curve.Scalar.New(2),
	}
	require.Equal(t, len(product2), 4)
	for i := range product2 {
		require.Equal(t, product2[i].Bytes(), expected2[i].Bytes())
	}
}
// TestMulAssignPolyError checks that Mul fails when a coefficient is nil.
func TestMulAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	lhs := polynomial{
		nil,
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	rhs := polynomial{
		curve.Scalar.New(1),
		curve.Scalar.New(2),
		curve.Scalar.New(3),
	}
	product, err := lhs.Mul(rhs)
	require.Error(t, err)
	require.Nil(t, product)
}
// TestMulValueAssignPoly verifies scalar multiplication of every
// coefficient of a polynomial.
func TestMulValueAssignPoly(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	coeffs := polynomial{
		curve.Scalar.New(3),
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	scaled, err := coeffs.MulScalar(curve.Scalar.New(3))
	require.NoError(t, err)
	require.NotNil(t, scaled)
	expected := []curves.Scalar{
		curve.Scalar.New(9),
		curve.Scalar.New(6),
		curve.Scalar.New(3),
	}
	for i := range expected {
		require.Equal(t, scaled[i].Bytes(), expected[i].Bytes())
	}
}
// TestMulValueAssignPolyError checks that MulScalar fails when a
// coefficient is nil.
func TestMulValueAssignPolyError(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	coeffs := polynomial{
		nil,
		curve.Scalar.New(2),
		curve.Scalar.New(1),
	}
	scaled, err := coeffs.MulScalar(curve.Scalar.New(3))
	require.Error(t, err)
	require.Nil(t, scaled)
}

View File

@ -0,0 +1,518 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"bytes"
crand "crypto/rand"
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// proofParamsMarshal is the BARE wire form of ProofParams: each generator
// is stored as a compressed affine point alongside the curve name used to
// decode it.
type proofParamsMarshal struct {
	X []byte `bare:"x"`
	Y []byte `bare:"y"`
	Z []byte `bare:"z"`
	Curve string `bare:"curve"`
}
// ProofParams contains three distinct public generators of G1: X, Y, Z.
// They are derived deterministically from the public key and caller-supplied
// entropy (see New).
type ProofParams struct {
	x, y, z curves.Point
}
// New derives the three generators X, Y, Z by hash-to-curve over a
// domain-separated message: 32 prefix bytes || entropy || serialized public
// key. The first prefix byte is varied (0xFF, 0xFE, 0xFD) so the three
// outputs are independent. Returns p itself, or an error if the public key
// cannot be serialized.
func (p *ProofParams) New(curve *curves.PairingCurve, pk *PublicKey, entropy []byte) (*ProofParams, error) {
	pkBytes, err := pk.MarshalBinary()
	if err != nil {
		return nil, err
	}
	prefix := bytes.Repeat([]byte{0xFF}, 32)
	data := append(prefix, entropy...)
	data = append(data, pkBytes...)
	// Mutating data[0] between hashes changes the domain separator for each
	// generator while reusing the same buffer.
	p.z = curve.Scalar.Point().Hash(data)
	data[0] = 0xFE
	p.y = curve.Scalar.Point().Hash(data)
	data[0] = 0xFD
	p.x = curve.Scalar.Point().Hash(data)
	return p, nil
}
// MarshalBinary serializes the three generators in compressed affine form,
// together with the curve name, using BARE encoding. Fails if any generator
// is unset.
func (p *ProofParams) MarshalBinary() ([]byte, error) {
	if p.x == nil || p.y == nil || p.z == nil {
		return nil, fmt.Errorf("some value x, y, or z is nil")
	}
	return bare.Marshal(&proofParamsMarshal{
		X:     p.x.ToAffineCompressed(),
		Y:     p.y.ToAffineCompressed(),
		Z:     p.z.ToAffineCompressed(),
		Curve: p.x.CurveName(),
	})
}
// UnmarshalBinary decodes BARE-encoded bytes produced by MarshalBinary back
// into ProofParams. It fails on nil input, an unknown curve name, or any
// point that does not decompress.
func (p *ProofParams) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("expected non-zero byte sequence")
	}
	wire := new(proofParamsMarshal)
	if err := bare.Unmarshal(data, wire); err != nil {
		return err
	}
	curve := curves.GetCurveByName(wire.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	x, err := curve.NewIdentityPoint().FromAffineCompressed(wire.X)
	if err != nil {
		return err
	}
	y, err := curve.NewIdentityPoint().FromAffineCompressed(wire.Y)
	if err != nil {
		return err
	}
	z, err := curve.NewIdentityPoint().FromAffineCompressed(wire.Z)
	if err != nil {
		return err
	}
	// Assign only after all three decode successfully so a failed decode
	// leaves p unchanged.
	p.x, p.y, p.z = x, y, z
	return nil
}
// MembershipProofCommitting contains value computed in Proof of knowledge and
// Blinding phases as described in section 7 of https://eprint.iacr.org/2020/777.pdf.
// It holds the blinded commitments sent to the verifier, the prover's secret
// randomness needed to answer a challenge in GenProof, and copies of the
// public inputs the proof was built against.
type MembershipProofCommitting struct {
	// Commitments included in the proof.
	eC curves.Point
	tSigma curves.Point
	tRho curves.Point
	// Secret intermediate values δ_σ = yσ and δ_ρ = yρ.
	deltaSigma curves.Scalar
	deltaRho curves.Scalar
	// Blinding randomness r_y, r_σ, r_ρ, r_δσ, r_δρ.
	blindingFactor curves.Scalar
	rSigma curves.Scalar
	rRho curves.Scalar
	rDeltaSigma curves.Scalar
	rDeltaRho curves.Scalar
	// Blinding scalars σ and ρ.
	sigma curves.Scalar
	rho curves.Scalar
	// Commitments R_σ, R_ρ, R_δσ, R_δρ and the pairing value R_E.
	capRSigma curves.Point
	capRRho curves.Point
	capRDeltaSigma curves.Point
	capRDeltaRho curves.Point
	capRE curves.Scalar
	// Public inputs captured at commit time.
	accumulator curves.Point
	witnessValue curves.Scalar
	xG1 curves.Point
	yG1 curves.Point
	zG1 curves.Point
}
// New initiates values of MembershipProofCommitting by performing the proof
// of knowledge and blinding phases described in section 7 of
// https://eprint.iacr.org/2020/777.pdf. It commits to the witness (c, y)
// under the proof parameters pp and returns everything needed to answer a
// Fiat-Shamir challenge via GenProof. Errors only if a pairing input fails
// the PairingPoint type assertion.
func (mpc *MembershipProofCommitting) New(
	witness *MembershipWitness,
	acc *Accumulator,
	pp *ProofParams,
	pk *PublicKey,
) (*MembershipProofCommitting, error) {
	// Randomly select σ, ρ
	sigma := witness.y.Random(crand.Reader)
	rho := witness.y.Random(crand.Reader)
	// E_C = C + (σ + ρ)Z
	t := sigma
	t = t.Add(rho)
	eC := pp.z
	eC = eC.Mul(t)
	eC = eC.Add(witness.c)
	// T_σ = σX
	tSigma := pp.x
	tSigma = tSigma.Mul(sigma)
	// T_ρ = ρY
	tRho := pp.y
	tRho = tRho.Mul(rho)
	// δ_σ = yσ
	deltaSigma := witness.y
	deltaSigma = deltaSigma.Mul(sigma)
	// δ_ρ = yρ
	deltaRho := witness.y
	deltaRho = deltaRho.Mul(rho)
	// Randomly pick blinding factors r_y, r_σ, r_ρ, r_δσ, r_δρ
	rY := witness.y.Random(crand.Reader)
	rSigma := witness.y.Random(crand.Reader)
	rRho := witness.y.Random(crand.Reader)
	rDeltaSigma := witness.y.Random(crand.Reader)
	rDeltaRho := witness.y.Random(crand.Reader)
	// R_σ = r_σ X
	capRSigma := pp.x
	capRSigma = capRSigma.Mul(rSigma)
	// R_ρ = r_ρ Y
	capRRho := pp.y
	capRRho = capRRho.Mul(rRho)
	// R_δσ = r_y T_σ - r_δσ X
	negX := pp.x
	negX = negX.Neg()
	capRDeltaSigma := tSigma.Mul(rY)
	capRDeltaSigma = capRDeltaSigma.Add(negX.Mul(rDeltaSigma))
	// R_δρ = r_y T_ρ - r_δρ Y
	negY := pp.y
	negY = negY.Neg()
	capRDeltaRho := tRho.Mul(rY)
	capRDeltaRho = capRDeltaRho.Add(negY.Mul(rDeltaRho))
	// P~, the G2 generator
	g2 := pk.value.Generator()
	// -r_δσ - r_δρ
	exp := rDeltaSigma
	exp = exp.Add(rDeltaRho)
	exp = exp.Neg()
	// -r_σ - r_ρ
	exp2 := rSigma
	exp2 = exp2.Add(rRho)
	exp2 = exp2.Neg()
	// rY * eC
	rYeC := eC.Mul(rY)
	// (-r_δσ - r_δρ)*Z
	expZ := pp.z.Mul(exp)
	// (-r_σ - r_ρ)*Z
	exp2Z := pp.z.Mul(exp2)
	// Every pairing input must be a PairingPoint; the assertions fail only
	// if the curve implementation is not pairing-capable.
	rYeCPrep, ok := rYeC.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	g2Prep, ok := g2.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	expZPrep, ok := expZ.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	exp2ZPrep, ok := exp2Z.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	pkPrep := pk.value
	// R_E = e(r_y E_C + (-r_δσ - r_δρ)Z, P~) * e((-r_σ - r_ρ)Z, pk)
	capRE := g2Prep.MultiPairing(rYeCPrep, g2Prep, expZPrep, g2Prep, exp2ZPrep, pkPrep)
	// Positional literal: field order matches the struct declaration, with
	// rY stored as blindingFactor.
	return &MembershipProofCommitting{
		eC,
		tSigma,
		tRho,
		deltaSigma,
		deltaRho,
		rY,
		rSigma,
		rRho,
		rDeltaSigma,
		rDeltaRho,
		sigma,
		rho,
		capRSigma,
		capRRho,
		capRDeltaSigma,
		capRDeltaRho,
		capRE,
		acc.value,
		witness.y,
		pp.x,
		pp.y,
		pp.z,
	}, nil
}
// GetChallengeBytes returns the transcript to hash for the Fiat-Shamir
// challenge, concatenated in the fixed order:
// V || E_C || T_sigma || T_rho || R_E || R_sigma || R_rho || R_delta_sigma || R_delta_rho
func (mpc MembershipProofCommitting) GetChallengeBytes() []byte {
	return bytes.Join([][]byte{
		mpc.accumulator.ToAffineCompressed(),
		mpc.eC.ToAffineCompressed(),
		mpc.tSigma.ToAffineCompressed(),
		mpc.tRho.ToAffineCompressed(),
		mpc.capRE.Bytes(),
		mpc.capRSigma.ToAffineCompressed(),
		mpc.capRRho.ToAffineCompressed(),
		mpc.capRDeltaSigma.ToAffineCompressed(),
		mpc.capRDeltaRho.ToAffineCompressed(),
	}, nil)
}
// GenProof computes the Schnorr responses for challenge c and returns the
// proof to send to the verifier. Each response has the form r + c*secret
// (see schnorr).
func (mpc *MembershipProofCommitting) GenProof(c curves.Scalar) *MembershipProof {
	return &MembershipProof{
		eC:          mpc.eC,
		tSigma:      mpc.tSigma,
		tRho:        mpc.tRho,
		sSigma:      schnorr(mpc.rSigma, mpc.sigma, c),      // s_σ = r_σ + c*σ
		sRho:        schnorr(mpc.rRho, mpc.rho, c),          // s_ρ = r_ρ + c*ρ
		sDeltaSigma: schnorr(mpc.rDeltaSigma, mpc.deltaSigma, c), // s_δσ = r_δσ + c*δ_σ
		sDeltaRho:   schnorr(mpc.rDeltaRho, mpc.deltaRho, c),     // s_δρ = r_δρ + c*δ_ρ
		sY:          schnorr(mpc.blindingFactor, mpc.witnessValue, c), // s_y = r_y + c*y
	}
}
// schnorr returns the Schnorr response r + challenge*v.
func schnorr(r, v, challenge curves.Scalar) curves.Scalar {
	return v.Mul(challenge).Add(r)
}
// membershipProofMarshal is the BARE wire form of MembershipProof: points
// as compressed affine bytes, scalars via Scalar.Bytes, plus the curve name
// used to decode them.
type membershipProofMarshal struct {
	EC []byte `bare:"e_c"`
	TSigma []byte `bare:"t_sigma"`
	TRho []byte `bare:"t_rho"`
	SSigma []byte `bare:"s_sigma"`
	SRho []byte `bare:"s_rho"`
	SDeltaSigma []byte `bare:"s_delta_sigma"`
	SDeltaRho []byte `bare:"s_delta_rho"`
	SY []byte `bare:"s_y"`
	Curve string `bare:"curve"`
}
// MembershipProof contains values in the proof to be verified: the blinded
// commitments E_C, T_σ, T_ρ and the Schnorr responses produced by GenProof.
type MembershipProof struct {
	eC curves.Point
	tSigma curves.Point
	tRho curves.Point
	sSigma curves.Scalar
	sRho curves.Scalar
	sDeltaSigma curves.Scalar
	sDeltaRho curves.Scalar
	sY curves.Scalar
}
// Finalize recomputes the verifier-side commitments from the proof, the
// public parameters, the public key and the challenge, producing the values
// fed back into the Fiat-Shamir hash (MembershipProofFinal.GetChallenge).
// The proof verifies when the recomputed challenge equals the original one.
// Errors only if a pairing input fails the PairingPoint type assertion.
func (mp *MembershipProof) Finalize(acc *Accumulator, pp *ProofParams, pk *PublicKey, challenge curves.Scalar) (*MembershipProofFinal, error) {
	// R_σ = s_σ X - c T_σ
	negTSigma := mp.tSigma
	negTSigma = negTSigma.Neg()
	capRSigma := pp.x.Mul(mp.sSigma)
	capRSigma = capRSigma.Add(negTSigma.Mul(challenge))
	// R_ρ = s_ρ Y - c T_ρ
	negTRho := mp.tRho
	negTRho = negTRho.Neg()
	capRRho := pp.y.Mul(mp.sRho)
	capRRho = capRRho.Add(negTRho.Mul(challenge))
	// R_δσ = s_y T_σ - s_δσ X
	negX := pp.x
	negX = negX.Neg()
	capRDeltaSigma := mp.tSigma.Mul(mp.sY)
	capRDeltaSigma = capRDeltaSigma.Add(negX.Mul(mp.sDeltaSigma))
	// R_δρ = s_y T_ρ - s_δρ Y
	negY := pp.y
	negY = negY.Neg()
	capRDeltaRho := mp.tRho.Mul(mp.sY)
	capRDeltaRho = capRDeltaRho.Add(negY.Mul(mp.sDeltaRho))
	// tildeP, the G2 generator
	g2 := pk.value.Generator()
	// Compute capRE, the pairing
	// E_c * s_y
	eCsY := mp.eC.Mul(mp.sY)
	// (-s_delta_sigma - s_delta_rho) * Z
	exp := mp.sDeltaSigma
	exp = exp.Add(mp.sDeltaRho)
	exp = exp.Neg()
	expZ := pp.z.Mul(exp)
	// (-c) * V
	exp = challenge.Neg()
	expV := acc.value.Mul(exp)
	// E_c * s_y + (-s_delta_sigma - s_delta_rho) * Z + (-c) * V
	lhs := eCsY.Add(expZ).Add(expV)
	// (-s_sigma - s_rho) * Z
	exp = mp.sSigma
	exp = exp.Add(mp.sRho)
	exp = exp.Neg()
	expZ2 := pp.z.Mul(exp)
	// E_c * c
	cEc := mp.eC.Mul(challenge)
	// (-s_sigma - s_rho) * Z + E_c * c
	rhs := cEc.Add(expZ2)
	// Every pairing input must be a PairingPoint.
	lhsPrep, ok := lhs.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	g2Prep, ok := g2.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	rhsPrep, ok := rhs.(curves.PairingPoint)
	if !ok {
		return nil, errors.New("incorrect type conversion")
	}
	pkPrep := pk.value
	// capRE = e(lhs, P~) * e(rhs, pk)
	capRE := g2Prep.MultiPairing(lhsPrep, g2Prep, rhsPrep, pkPrep)
	return &MembershipProofFinal{
		acc.value,
		mp.eC,
		mp.tSigma,
		mp.tRho,
		capRE,
		capRSigma,
		capRRho,
		capRDeltaSigma,
		capRDeltaRho,
	}, nil
}
// MarshalBinary serializes the proof using BARE encoding: points in
// compressed affine form, scalars via Bytes, plus the curve name.
func (mp MembershipProof) MarshalBinary() ([]byte, error) {
	return bare.Marshal(&membershipProofMarshal{
		EC:          mp.eC.ToAffineCompressed(),
		TSigma:      mp.tSigma.ToAffineCompressed(),
		TRho:        mp.tRho.ToAffineCompressed(),
		SSigma:      mp.sSigma.Bytes(),
		SRho:        mp.sRho.Bytes(),
		SDeltaSigma: mp.sDeltaSigma.Bytes(),
		SDeltaRho:   mp.sDeltaRho.Bytes(),
		SY:          mp.sY.Bytes(),
		Curve:       mp.eC.CurveName(),
	})
}
// UnmarshalBinary decodes BARE bytes produced by MarshalBinary back into a
// MembershipProof. It fails on nil input, an unknown curve name, or any
// point/scalar that does not decode; on failure mp is left unchanged.
func (mp *MembershipProof) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("expected non-zero byte sequence")
	}
	tv := new(membershipProofMarshal)
	err := bare.Unmarshal(data, tv)
	if err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	eC, err := curve.NewIdentityPoint().FromAffineCompressed(tv.EC)
	if err != nil {
		return err
	}
	tSigma, err := curve.NewIdentityPoint().FromAffineCompressed(tv.TSigma)
	if err != nil {
		return err
	}
	tRho, err := curve.NewIdentityPoint().FromAffineCompressed(tv.TRho)
	if err != nil {
		return err
	}
	sSigma, err := curve.NewScalar().SetBytes(tv.SSigma)
	if err != nil {
		return err
	}
	sRho, err := curve.NewScalar().SetBytes(tv.SRho)
	if err != nil {
		return err
	}
	sDeltaSigma, err := curve.NewScalar().SetBytes(tv.SDeltaSigma)
	if err != nil {
		return err
	}
	sDeltaRho, err := curve.NewScalar().SetBytes(tv.SDeltaRho)
	if err != nil {
		return err
	}
	sY, err := curve.NewScalar().SetBytes(tv.SY)
	if err != nil {
		return err
	}
	// Assign only after every field decodes successfully.
	mp.eC = eC
	mp.tSigma = tSigma
	mp.tRho = tRho
	mp.sSigma = sSigma
	mp.sRho = sRho
	mp.sDeltaSigma = sDeltaSigma
	mp.sDeltaRho = sDeltaRho
	mp.sY = sY
	return nil
}
// MembershipProofFinal contains values that are input to Fiat-Shamir Heuristic:
// the public accumulator, the proof commitments, and the commitments the
// verifier recomputed in MembershipProof.Finalize.
type MembershipProofFinal struct {
	accumulator curves.Point
	eC curves.Point
	tSigma curves.Point
	tRho curves.Point
	capRE curves.Scalar
	capRSigma curves.Point
	capRRho curves.Point
	capRDeltaSigma curves.Point
	capRDeltaRho curves.Point
}
// GetChallenge recomputes the Fiat-Shamir challenge by hashing the same
// transcript layout as MembershipProofCommitting.GetChallengeBytes:
// V || E_C || T_σ || T_ρ || R_E || R_σ || R_ρ || R_δσ || R_δρ.
func (m MembershipProofFinal) GetChallenge(curve *curves.PairingCurve) curves.Scalar {
	transcript := bytes.Join([][]byte{
		m.accumulator.ToAffineCompressed(),
		m.eC.ToAffineCompressed(),
		m.tSigma.ToAffineCompressed(),
		m.tRho.ToAffineCompressed(),
		m.capRE.Bytes(),
		m.capRSigma.ToAffineCompressed(),
		m.capRRho.ToAffineCompressed(),
		m.capRDeltaSigma.ToAffineCompressed(),
		m.capRDeltaRho.ToAffineCompressed(),
	}, nil)
	return curve.Scalar.Hash(transcript)
}

View File

@ -0,0 +1,182 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestProofParamsMarshal round-trips ProofParams through
// MarshalBinary/UnmarshalBinary.
func TestProofParamsMarshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	params, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, params.x)
	require.NotNil(t, params.y)
	require.NotNil(t, params.z)
	serialized, err := params.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, serialized)
	// Start from throwaway generators and confirm unmarshal restores the
	// serialized ones.
	decoded := &ProofParams{
		curve.PointG1.Generator(),
		curve.PointG1.Generator(),
		curve.PointG1.Generator(),
	}
	err = decoded.UnmarshalBinary(serialized)
	require.NoError(t, err)
	require.True(t, params.x.Equal(decoded.x))
	require.True(t, params.y.Equal(decoded.y))
	require.True(t, params.z.Equal(decoded.z))
}
// TestMembershipProof runs the full commit/challenge/prove/finalize flow,
// then repeats it after a batch accumulator+witness update.
func TestMembershipProof(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	// Accumulate seven hashed elements.
	elements := make([]Element, 0, 7)
	for _, seed := range []string{"3", "4", "5", "6", "7", "8", "9"} {
		elements = append(elements, curve.Scalar.Hash([]byte(seed)))
	}
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	// Membership witness for elements[3].
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	// Proof parameters: randomly sampled G1 points X, Y, Z.
	params, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, params.x)
	require.NotNil(t, params.y)
	require.NotNil(t, params.z)
	mpc, err := new(MembershipProofCommitting).New(wit, acc, params, pk)
	require.NoError(t, err)
	testMPC(t, mpc)
	challenge := curve.Scalar.Hash(mpc.GetChallengeBytes())
	require.NotNil(t, challenge)
	proof := mpc.GenProof(challenge)
	require.NotNil(t, proof)
	testProof(t, proof)
	finalProof, err := proof.Finalize(acc, params, pk, challenge)
	require.NoError(t, err)
	require.NotNil(t, finalProof)
	testFinalProof(t, finalProof)
	challenge2 := finalProof.GetChallenge(curve)
	require.Equal(t, challenge, challenge2)
	// The proof flow must still work after the accumulator and witness
	// receive a batch update.
	data := make([]Element, 0, 5)
	for _, seed := range []string{"1", "2", "3", "4", "5"} {
		data = append(data, curve.Scalar.Hash([]byte(seed)))
	}
	additions := data[0:2]
	deletions := data[2:5]
	_, coefficients, err := acc.Update(sk, additions, deletions)
	require.NoError(t, err)
	require.NotNil(t, coefficients)
	_, err = wit.BatchUpdate(additions, deletions, coefficients)
	require.NoError(t, err)
	newParams, err := new(ProofParams).New(curve, pk, []byte("entropy"))
	require.NoError(t, err)
	require.NotNil(t, newParams.x)
	require.NotNil(t, newParams.y)
	require.NotNil(t, newParams.z)
	newMPC, err := new(MembershipProofCommitting).New(wit, acc, newParams, pk)
	require.NoError(t, err)
	testMPC(t, newMPC)
	challenge3 := curve.Scalar.Hash(newMPC.GetChallengeBytes())
	require.NotNil(t, challenge3)
	newProof := newMPC.GenProof(challenge3)
	require.NotNil(t, newProof)
	testProof(t, newProof)
	newFinalProof, err := newProof.Finalize(acc, newParams, pk, challenge3)
	require.NoError(t, err)
	require.NotNil(t, newFinalProof)
	testFinalProof(t, newFinalProof)
	challenge4 := newFinalProof.GetChallenge(curve)
	require.Equal(t, challenge3, challenge4)
}
// testMPC asserts that every field of a MembershipProofCommitting is populated.
func testMPC(t *testing.T, mpc *MembershipProofCommitting) {
	for _, field := range []any{
		mpc.eC,
		mpc.tSigma,
		mpc.tRho,
		mpc.deltaSigma,
		mpc.deltaRho,
		mpc.blindingFactor,
		mpc.rSigma,
		mpc.rRho,
		mpc.rDeltaSigma,
		mpc.rDeltaRho,
		mpc.sigma,
		mpc.rho,
		mpc.capRSigma,
		mpc.capRRho,
		mpc.capRDeltaSigma,
		mpc.capRDeltaRho,
		mpc.capRE,
		mpc.accumulator,
		mpc.witnessValue,
		mpc.xG1,
		mpc.yG1,
		mpc.zG1,
	} {
		require.NotNil(t, field)
	}
}
// testProof asserts that every field of a MembershipProof is populated.
func testProof(t *testing.T, proof *MembershipProof) {
	for _, field := range []any{
		proof.eC,
		proof.tSigma,
		proof.tRho,
		proof.sSigma,
		proof.sRho,
		proof.sDeltaSigma,
		proof.sDeltaRho,
		proof.sY,
	} {
		require.NotNil(t, field)
	}
}
// testFinalProof asserts that every field of a MembershipProofFinal is populated.
func testFinalProof(t *testing.T, finalProof *MembershipProofFinal) {
	for _, field := range []any{
		finalProof.accumulator,
		finalProof.eC,
		finalProof.tSigma,
		finalProof.tRho,
		finalProof.capRE,
		finalProof.capRSigma,
		finalProof.capRRho,
		finalProof.capRDeltaSigma,
		finalProof.capRDeltaRho,
	} {
		require.NotNil(t, field)
	}
}

View File

@ -0,0 +1,378 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"errors"
"fmt"
"git.sr.ht/~sircmpwn/go-bare"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// MembershipWitness contains the witness c and the value y respect to the
// accumulator state: y is the accumulated element, and c is the accumulator
// value with y removed (computed via Accumulator.Remove in New).
type MembershipWitness struct {
	c curves.Point
	y curves.Scalar
}
// New creates a new membership witness for element y against accumulator acc
// using the secret key sk. The witness point is the accumulator value with y
// removed. Fails if the accumulator value, secret key, or y is nil/zero, or
// if removal fails.
func (mw *MembershipWitness) New(y Element, acc *Accumulator, sk *SecretKey) (*MembershipWitness, error) {
	if acc.value == nil || acc.value.IsIdentity() {
		return nil, fmt.Errorf("value of accumulator should not be nil")
	}
	if sk.value == nil || sk.value.IsZero() {
		return nil, fmt.Errorf("secret key should not be nil")
	}
	if y == nil || y.IsZero() {
		return nil, fmt.Errorf("y should not be nil")
	}
	// Remove y on a wrapper accumulator; NOTE(review): this presumably leaves
	// the caller's acc unmodified — confirm Remove replaces rather than
	// mutates the shared point.
	newAcc := &Accumulator{acc.value}
	_, err := newAcc.Remove(sk, y)
	if err != nil {
		return nil, err
	}
	mw.c = newAcc.value
	// y.Add(y.Zero()) produces a copy of y so the witness does not alias the
	// caller's scalar.
	mw.y = y.Add(y.Zero())
	return mw, nil
}
// Verify the MembershipWitness mw is a valid witness as per section 4 in
// <https://eprint.iacr.org/2020/777>: it checks the pairing equation
// e(C, y*P~ + Q~) * e(-V, P~) == 1, where P~ is the G2 generator, Q~ is the
// public key, and V is the accumulator value. Returns nil on success and a
// descriptive error for malformed inputs or a failed check.
func (mw MembershipWitness) Verify(pk *PublicKey, acc *Accumulator) error {
	if mw.c == nil || mw.y == nil || mw.c.IsIdentity() || mw.y.IsZero() {
		return fmt.Errorf("c and y should not be nil")
	}
	if pk.value == nil || pk.value.IsIdentity() {
		return fmt.Errorf("invalid public key")
	}
	if acc.value == nil || acc.value.IsIdentity() {
		return fmt.Errorf("accumulator value should not be nil")
	}
	// Set -tildeP
	g2, ok := pk.value.Generator().(curves.PairingPoint)
	if !ok {
		return errors.New("incorrect type conversion")
	}
	// y*tildeP + tildeQ, tildeP is a G2 generator.
	p, ok := g2.Mul(mw.y).Add(pk.value).(curves.PairingPoint)
	if !ok {
		return errors.New("incorrect type conversion")
	}
	// Prepare pairing inputs.
	witness, ok := mw.c.(curves.PairingPoint)
	if !ok {
		return errors.New("incorrect type conversion")
	}
	v, ok := acc.value.Neg().(curves.PairingPoint)
	if !ok {
		return errors.New("incorrect type conversion")
	}
	// Check e(witness, y*tildeP + tildeQ) * e(-acc, tildeP) == Identity
	result := p.MultiPairing(witness, p, v, g2)
	if !result.IsOne() {
		return fmt.Errorf("invalid result")
	}
	return nil
}
// ApplyDelta returns C' = dA(y)/dD(y)*C + 1/dD(y) * <Gamma_y, Omega>
// according to the witness update protocol described in section 4 of
// https://eprint.iacr.org/2020/777.pdf. The witness is updated in place;
// delta.d carries dA(y)/dD(y) and delta.p carries 1/dD(y) * <Gamma_y, Omega>.
func (mw *MembershipWitness) ApplyDelta(delta *Delta) (*MembershipWitness, error) {
	if mw.c == nil || mw.y == nil || delta == nil {
		return nil, fmt.Errorf("y, c or delta should not be nil")
	}
	// C' = dA(y)/dD(y)*C + 1/dD(y) * <Gamma_y, Omega>
	mw.c = mw.c.Mul(delta.d).Add(delta.p)
	return mw, nil
}
// BatchUpdate performs a batch witness update as described in section 4 of
// https://eprint.iacr.org/2020/777.pdf: it evaluates the update delta at
// mw.y from the published additions, deletions and update coefficients, then
// applies it in place. Returns the updated witness (mw itself), or an error
// if the delta cannot be evaluated or applied.
func (mw *MembershipWitness) BatchUpdate(additions []Element, deletions []Element, coefficients []Coefficient) (*MembershipWitness, error) {
	delta, err := evaluateDelta(mw.y, additions, deletions, coefficients)
	if err != nil {
		return nil, err
	}
	mw, err = mw.ApplyDelta(delta)
	if err != nil {
		// Wrap rather than discard the underlying error so callers can see
		// why the update failed.
		return nil, fmt.Errorf("applyDelta fails: %w", err)
	}
	return mw, nil
}
// MultiBatchUpdate performs a multi-batch witness update across several
// epochs as described in section 4.2 of https://eprint.iacr.org/2020/777.pdf.
// A, D and C hold, per epoch, the additions, deletions and update
// coefficients. Returns the updated witness (mw itself), or an error if the
// combined delta cannot be evaluated or applied.
func (mw *MembershipWitness) MultiBatchUpdate(A [][]Element, D [][]Element, C [][]Coefficient) (*MembershipWitness, error) {
	delta, err := evaluateDeltas(mw.y, A, D, C)
	if err != nil {
		// Wrap rather than discard the underlying error so callers can see
		// why the evaluation failed.
		return nil, fmt.Errorf("evaluateDeltas fails: %w", err)
	}
	mw, err = mw.ApplyDelta(delta)
	if err != nil {
		return nil, err
	}
	return mw, nil
}
// MarshalBinary converts a membership witness to bytes: the compressed
// affine form of c followed by the bytes of y, wrapped in a BARE
// structMarshal together with the curve name.
func (mw MembershipWitness) MarshalBinary() ([]byte, error) {
	if mw.c == nil || mw.y == nil {
		return nil, fmt.Errorf("c and y value should not be nil")
	}
	result := append(mw.c.ToAffineCompressed(), mw.y.Bytes()...)
	tv := &structMarshal{
		Value: result,
		Curve: mw.c.CurveName(),
	}
	return bare.Marshal(tv)
}
// UnmarshalBinary converts bytes into MembershipWitness. The payload must be
// exactly one compressed affine point followed by one scalar for the named
// curve; anything else is rejected. On failure mw is left unchanged.
func (mw *MembershipWitness) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("input data should not be nil")
	}
	tv := new(structMarshal)
	err := bare.Unmarshal(data, tv)
	if err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	// Compute the expected fixed sizes from the curve's own encodings.
	ptLength := len(curve.Point.ToAffineCompressed())
	scLength := len(curve.Scalar.Bytes())
	expectedLength := ptLength + scLength
	if len(tv.Value) != expectedLength {
		return fmt.Errorf("invalid byte sequence")
	}
	cValue, err := curve.Point.FromAffineCompressed(tv.Value[:ptLength])
	if err != nil {
		return err
	}
	yValue, err := curve.Scalar.SetBytes(tv.Value[ptLength:])
	if err != nil {
		return err
	}
	mw.c = cValue
	mw.y = yValue
	return nil
}
// Delta contains values d and p, where d should be the division dA(y)/dD(y)
// on some value y and p should be equal to 1/dD * <Gamma_y, Omega>. It is
// produced by evaluateDelta/evaluateDeltas and consumed by
// MembershipWitness.ApplyDelta.
type Delta struct {
	d curves.Scalar
	p curves.Point
}
// MarshalBinary serializes the delta as the compressed affine form of p
// followed by the bytes of d, wrapped in a BARE structMarshal together with
// the curve name.
func (d *Delta) MarshalBinary() ([]byte, error) {
	if d.d == nil || d.p == nil {
		return nil, fmt.Errorf("d and p should not be nil")
	}
	payload := append(d.p.ToAffineCompressed(), d.d.Bytes()...)
	return bare.Marshal(&structMarshal{
		Value: payload,
		Curve: d.p.CurveName(),
	})
}
// UnmarshalBinary converts data into Delta. The payload must be exactly one
// compressed affine point followed by one scalar for the named curve;
// anything else is rejected. On failure d is left unchanged.
func (d *Delta) UnmarshalBinary(data []byte) error {
	if data == nil {
		return fmt.Errorf("expected non-zero byte sequence")
	}
	tv := new(structMarshal)
	err := bare.Unmarshal(data, tv)
	if err != nil {
		return err
	}
	curve := curves.GetCurveByName(tv.Curve)
	if curve == nil {
		return fmt.Errorf("invalid curve")
	}
	// Compute the expected fixed sizes from the curve's own encodings.
	ptLength := len(curve.Point.ToAffineCompressed())
	scLength := len(curve.Scalar.Bytes())
	expectedLength := ptLength + scLength
	if len(tv.Value) != expectedLength {
		return fmt.Errorf("invalid byte sequence")
	}
	pValue, err := curve.NewIdentityPoint().FromAffineCompressed(tv.Value[:ptLength])
	if err != nil {
		return err
	}
	dValue, err := curve.NewScalar().SetBytes(tv.Value[ptLength:])
	if err != nil {
		return err
	}
	// (A second, redundant `if err != nil` check was removed here.)
	d.d = dValue
	d.p = pValue
	return nil
}
// evaluateDeltas computes the values used for a membership witness
// multi-batch update with epochs, as described in section 4.2, page 11 of
// https://eprint.iacr.org/2020/777.pdf. A, D and C hold, per epoch, the
// additions, deletions and update coefficients; all three must have the
// same length. Returns the combined Delta to feed to ApplyDelta, or an
// error if any epoch polynomial cannot be evaluated or if y was deleted
// (dD(y) is not invertible).
func evaluateDeltas(y Element, A [][]Element, D [][]Element, C [][]Coefficient) (*Delta, error) {
	if len(A) != len(D) || len(A) != len(C) {
		return nil, fmt.Errorf("a, d, c should have same length")
	}
	one := y.One()
	size := len(A)
	// Per-epoch evaluations: aa[i] = dA_i(y), dd[i] = dD_i(y).
	// dA(x) = ∏ 1..n (yA_i - x)
	aa := make([]curves.Scalar, 0)
	// dD(x) = ∏ 1..m (yD_i - x)
	dd := make([]curves.Scalar, 0)
	a := one
	d := one
	// Running products over all epochs:
	// dA_{a->b}(y) = ∏ a..b dAs(y)
	// dD_{a->b}(y) = ∏ a..b dDs(y)
	for i := 0; i < size; i++ {
		adds := A[i]
		dels := D[i]
		// ta = dAs(y)
		ta, err := dad(adds, y)
		if err != nil {
			return nil, fmt.Errorf("dad on additions fails")
		}
		// td = dDs(y)
		td, err := dad(dels, y)
		if err != nil {
			return nil, fmt.Errorf("dad on deletions fails")
		}
		// ∏ a..b dAs(y)
		a = a.Mul(ta)
		// ∏ a..b dDs(y)
		d = d.Mul(td)
		aa = append(aa, ta)
		dd = append(dd, td)
	}
	// If this inversion fails, y was deleted in one of the epochs and no
	// valid witness update exists.
	d, err := d.Invert()
	if err != nil {
		return nil, fmt.Errorf("no inverse exists")
	}
	// Accumulate <Gamma_y, Omega> as a point polynomial.
	p := make(polynomialPoint, 0, size)
	// Ωi->j+1 = ∑ 1..t (dAt * dDt-1) · Ω
	for i := 0; i < size; i++ {
		// t = i+1
		// ∏^(t-1)_(h=i+1)
		ddh := one
		// dDi→t1 (y): product of deletion evaluations before epoch i.
		for h := 0; h < i; h++ {
			ddh = ddh.Mul(dd[h])
		}
		// ∏^(j+1)_(k=t+1)
		dak := one
		// dAt->j(y): product of addition evaluations after epoch i.
		for k := i + 1; k < size; k++ {
			dak = dak.Mul(aa[k])
		}
		// dDi->t-1(y) * dAt->j(y)
		dak = dak.Mul(ddh)
		pp := make(polynomialPoint, len(C[i]))
		for j := 0; j < len(pp); j++ {
			pp[j] = C[i][j]
		}
		// dDi->t-1(y) * dAt->j(y) · Ω
		pp, err := pp.Mul(dak)
		if err != nil {
			return nil, fmt.Errorf("pp.Mul fails")
		}
		p, err = p.Add(pp)
		if err != nil {
			return nil, fmt.Errorf("pp.Add fails")
		}
	}
	// dAi->j(y)/dDi->j(y)
	a = a.Mul(d)
	// Ωi->j(y)
	v, err := p.evaluate(y)
	if err != nil {
		return nil, fmt.Errorf("p.evaluate fails")
	}
	// (1/dDi->j(y)) * Ωi->j(y)
	v = v.Mul(d)
	return &Delta{d: a, p: v}, nil
}
// evaluateDelta computes the values used for a single membership witness
// batch update, as described in section 4.1 of
// https://eprint.iacr.org/2020/777.pdf. Returns the Delta to feed to
// ApplyDelta, or an error if a polynomial cannot be evaluated or if y was
// deleted (dD(y) is not invertible).
func evaluateDelta(y Element, additions []Element, deletions []Element, coefficients []Coefficient) (*Delta, error) {
	// dD(y) = ∏ 1..m (yD_i - y), d = 1/dD(y)
	var err error
	d, err := dad(deletions, y)
	if err != nil {
		return nil, fmt.Errorf("dad fails on deletions")
	}
	// If this inversion fails, y itself was deleted and no valid witness
	// update exists.
	d, err = d.Invert()
	if err != nil {
		return nil, fmt.Errorf("no inverse exists")
	}
	//dA(y) = ∏ 1..n (yA_i - y)
	a, err := dad(additions, y)
	if err != nil {
		return nil, fmt.Errorf("dad fails on additions")
	}
	// dA(y)/dD(y)
	a = a.Mul(d)
	// Create a PolynomialG1 from coefficients
	p := make(polynomialPoint, len(coefficients))
	for i := 0; i < len(coefficients); i++ {
		p[i] = coefficients[i]
	}
	// <Gamma_y, Omega>
	v, err := p.evaluate(y)
	if err != nil {
		return nil, fmt.Errorf("p.evaluate fails")
	}
	// 1/dD * <Gamma_y, Omega>
	v = v.Mul(d)
	return &Delta{d: a, p: v}, nil
}

View File

@ -0,0 +1,229 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package accumulator
import (
"testing"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// Test_Membership_Witness_New checks that a witness can be built from a
// fresh accumulator and a zero-seeded secret key.
func Test_Membership_Witness_New(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	var seed [32]byte
	sk, _ := new(SecretKey).New(curve, seed[:])
	acc, _ := new(Accumulator).New(curve)
	mw, err := new(MembershipWitness).New(curve.Scalar.New(2), acc, sk)
	require.NoError(t, err)
	require.NotNil(t, mw.c)
	require.NotNil(t, mw.y)
}
// Test_Membership_Witness_Marshal round-trips a witness through
// MarshalBinary/UnmarshalBinary.
func Test_Membership_Witness_Marshal(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	original := &MembershipWitness{
		curve.PointG1.Generator().Mul(curve.Scalar.New(10)),
		curve.Scalar.New(15),
	}
	data, err := original.MarshalBinary()
	require.NoError(t, err)
	require.NotNil(t, data)
	decoded := &MembershipWitness{}
	err = decoded.UnmarshalBinary(data)
	require.NoError(t, err)
	require.True(t, original.c.Equal(decoded.c))
	require.Equal(t, 0, original.y.Cmp(decoded.y))
}
// Test_Membership verifies a witness for an accumulated element, then checks
// that a forged witness and a forged accumulator both fail verification.
func Test_Membership(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	elements := []Element{element1, element2, element3, element4, element5, element6, element7}
	// nm_witness_max works as well if set to value larger than 0 for this test.
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	// The accumulator value must be a non-identity point on the curve and
	// distinct from the plain generator.
	require.False(t, acc.value.IsIdentity())
	require.True(t, acc.value.IsOnCurve())
	require.NotEqual(t, acc.value, curve.NewG1GeneratorPoint())
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	err = wit.Verify(pk, acc)
	require.NoError(t, err)
	// Test wrong cases, forge a wrong witness
	wrongWit := MembershipWitness{
		curve.PointG1.Identity(),
		curve.Scalar.One(),
	}
	err = wrongWit.Verify(pk, acc)
	require.Error(t, err)
	// Test wrong cases, forge a wrong accumulator
	wrongAcc := &Accumulator{
		curve.PointG1.Generator(),
	}
	err = wit.Verify(pk, wrongAcc)
	require.Error(t, err)
}
// Test_Membership_Batch_Update applies one batch of additions and deletions
// to the accumulator and checks the witness still verifies after BatchUpdate.
func Test_Membership_Batch_Update(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	elements := []Element{element1, element2, element3, element4, element5, element6, element7}
	// nm_witness_max works as well if set to value larger than 0 for this test.
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	require.Equal(t, wit.y, elements[3])
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
	data1 := curve.Scalar.Hash([]byte("1"))
	data2 := curve.Scalar.Hash([]byte("2"))
	data3 := curve.Scalar.Hash([]byte("3"))
	data4 := curve.Scalar.Hash([]byte("4"))
	data5 := curve.Scalar.Hash([]byte("5"))
	data := []Element{data1, data2, data3, data4, data5}
	additions := data[0:2]
	deletions := data[2:5]
	// Update mutates the accumulator and yields coefficients needed by
	// witness holders to update their own witnesses.
	_, coefficients, err := acc.Update(sk, additions, deletions)
	require.NoError(t, err)
	require.NotNil(t, coefficients)
	_, err = wit.BatchUpdate(additions, deletions, coefficients)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
}
// Test_Membership_Multi_Batch_Update applies three successive batches of
// updates to the accumulator, then checks that MultiBatchUpdate brings the
// witness up to date across all of them at once.
func Test_Membership_Multi_Batch_Update(t *testing.T) {
	curve := curves.BLS12381(&curves.PointBls12381G1{})
	sk, _ := new(SecretKey).New(curve, []byte("1234567890"))
	pk, _ := sk.GetPublicKey(curve)
	element1 := curve.Scalar.Hash([]byte("3"))
	element2 := curve.Scalar.Hash([]byte("4"))
	element3 := curve.Scalar.Hash([]byte("5"))
	element4 := curve.Scalar.Hash([]byte("6"))
	element5 := curve.Scalar.Hash([]byte("7"))
	element6 := curve.Scalar.Hash([]byte("8"))
	element7 := curve.Scalar.Hash([]byte("9"))
	element8 := curve.Scalar.Hash([]byte("10"))
	element9 := curve.Scalar.Hash([]byte("11"))
	element10 := curve.Scalar.Hash([]byte("12"))
	element11 := curve.Scalar.Hash([]byte("13"))
	element12 := curve.Scalar.Hash([]byte("14"))
	element13 := curve.Scalar.Hash([]byte("15"))
	element14 := curve.Scalar.Hash([]byte("16"))
	element15 := curve.Scalar.Hash([]byte("17"))
	element16 := curve.Scalar.Hash([]byte("18"))
	element17 := curve.Scalar.Hash([]byte("19"))
	element18 := curve.Scalar.Hash([]byte("20"))
	elements := []Element{
		element1,
		element2,
		element3,
		element4,
		element5,
		element6,
		element7,
		element8,
		element9,
		element10,
		element11,
		element12,
		element13,
		element14,
		element15,
		element16,
		element17,
		element18,
	}
	acc, err := new(Accumulator).WithElements(curve, sk, elements)
	require.NoError(t, err)
	require.NotNil(t, acc.value)
	wit, err := new(MembershipWitness).New(elements[3], acc, sk)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
	data1 := curve.Scalar.Hash([]byte("1"))
	data2 := curve.Scalar.Hash([]byte("2"))
	data3 := curve.Scalar.Hash([]byte("3"))
	data4 := curve.Scalar.Hash([]byte("4"))
	data5 := curve.Scalar.Hash([]byte("5"))
	data := []Element{data1, data2, data3, data4, data5}
	// Batch 1: two additions, three deletions.
	adds1 := data[0:2]
	dels1 := data[2:5]
	_, coeffs1, err := acc.Update(sk, adds1, dels1)
	require.NoError(t, err)
	require.NotNil(t, coeffs1)
	// Batch 2: deletions only.
	dels2 := elements[8:10]
	_, coeffs2, err := acc.Update(sk, []Element{}, dels2)
	require.NoError(t, err)
	require.NotNil(t, coeffs2)
	// Batch 3: deletions only.
	dels3 := elements[11:14]
	_, coeffs3, err := acc.Update(sk, []Element{}, dels3)
	require.NoError(t, err)
	require.NotNil(t, coeffs3)
	// Assemble per-batch additions, deletions and coefficients in order.
	a := make([][]Element, 3)
	a[0] = adds1
	a[1] = []Element{}
	a[2] = []Element{}
	d := make([][]Element, 3)
	d[0] = dels1
	d[1] = dels2
	d[2] = dels3
	c := make([][]Coefficient, 3)
	c[0] = coeffs1
	c[1] = coeffs2
	c[2] = coeffs3
	_, err = wit.MultiBatchUpdate(a, d, c)
	require.NoError(t, err)
	err = wit.Verify(pk, acc)
	require.Nil(t, err)
}

View File

@ -0,0 +1,57 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package bulletproof
import (
"github.com/pkg/errors"
"golang.org/x/crypto/sha3"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// generators contains a list of points to be used as generators for bulletproofs.
type generators []curves.Point
// ippGenerators holds generators necessary for an Inner Product Proof
// It includes a single u generator, and a list of generators divided in half to G and H
// See lines 10 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
type ippGenerators struct {
	G generators // first half of the derived points
	H generators // second half of the derived points; always same length as G
}
// getGeneratorPoints derives deterministic generators by seeding Shake256
// with the domain and hashing successive 64-byte outputs to curve points.
// lenVector is the length of the scalars used for the Inner Product Proof;
// 2*lenVector points are produced and split evenly into the G and H lists.
// See lines 10 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
func getGeneratorPoints(lenVector int, domain []byte, curve curves.Curve) (*ippGenerators, error) {
	shake := sha3.NewShake256()
	if _, err := shake.Write(domain); err != nil {
		return nil, errors.Wrap(err, "getGeneratorPoints shake.Write")
	}
	points := make([]curves.Point, 2*lenVector)
	for i := range points {
		var material [64]byte
		if _, err := shake.Read(material[:]); err != nil {
			return nil, errors.Wrap(err, "getGeneratorPoints shake.Read")
		}
		points[i] = curve.Point.Hash(material[:])
	}
	// First half becomes G, second half becomes H.
	G, H, err := splitPointVector(points)
	if err != nil {
		return nil, errors.Wrap(err, "getGeneratorPoints splitPointVector")
	}
	return &ippGenerators{G: G, H: H}, nil
}

View File

@ -0,0 +1,61 @@
package bulletproof
import (
"testing"
"github.com/stretchr/testify/require"
"golang.org/x/crypto/sha3"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestGeneratorsHappyPath checks getGeneratorPoints yields two lists of the
// requested length with no duplicate points among them.
func TestGeneratorsHappyPath(t *testing.T) {
	curve := curves.ED25519()
	gs, err := getGeneratorPoints(10, []byte("test"), *curve)
	gsConcatenated := concatIPPGenerators(*gs)
	require.NoError(t, err)
	require.Len(t, gs.G, 10)
	require.Len(t, gs.H, 10)
	require.True(t, noDuplicates(gsConcatenated))
}
// TestGeneratorsUniquePerDomain checks that different domain separators
// produce fully disjoint generator sets.
func TestGeneratorsUniquePerDomain(t *testing.T) {
	curve := curves.ED25519()
	gs1, err := getGeneratorPoints(10, []byte("test"), *curve)
	gs1Concatenated := concatIPPGenerators(*gs1)
	require.NoError(t, err)
	gs2, err := getGeneratorPoints(10, []byte("test2"), *curve)
	gs2Concatenated := concatIPPGenerators(*gs2)
	require.NoError(t, err)
	require.True(t, areDisjoint(gs1Concatenated, gs2Concatenated))
}
// noDuplicates reports whether every point in gs is distinct, comparing
// points by the SHA3-256 digest of their compressed affine encoding.
func noDuplicates(gs generators) bool {
	seen := make(map[[32]byte]bool, len(gs))
	for _, point := range gs {
		digest := sha3.Sum256(point.ToAffineCompressed())
		if seen[digest] {
			return false
		}
		seen[digest] = true
	}
	return true
}
// areDisjoint reports whether gs1 and gs2 share no common point,
// using an O(n*m) pairwise comparison (fine for test-sized inputs).
func areDisjoint(gs1, gs2 generators) bool {
	for _, left := range gs1 {
		for _, right := range gs2 {
			if left.Equal(right) {
				return false
			}
		}
	}
	return true
}
// concatIPPGenerators returns G followed by H as one flat generator list.
// The output is preallocated to its final size to avoid append regrowth.
func concatIPPGenerators(ippGens ippGenerators) generators {
	out := make(generators, 0, len(ippGens.G)+len(ippGens.H))
	out = append(out, ippGens.G...)
	out = append(out, ippGens.H...)
	return out
}

View File

@ -0,0 +1,181 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package bulletproof
import (
"github.com/pkg/errors"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// innerProduct takes two lists of scalars (a, b) and performs the dot product returning a single scalar.
// Errors if the vectors differ in length or are empty.
func innerProduct(a, b []curves.Scalar) (curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of scalar vectors must be the same")
	}
	if len(a) < 1 {
		return nil, errors.New("length of vectors must be at least one")
	}
	// Accumulate sum += a[i]*b[i], starting from zero on the same curve.
	sum := a[0].Zero()
	for i := range a {
		sum = a[i].MulAdd(b[i], sum)
	}
	return sum, nil
}
// splitPointVector takes a vector of points, splits it in half returning each half.
// The input must be non-empty and of even length.
func splitPointVector(points []curves.Point) ([]curves.Point, []curves.Point, error) {
	if len(points) < 1 {
		return nil, nil, errors.New("length of points must be at least one")
	}
	if len(points)%2 != 0 {
		return nil, nil, errors.New("length of points must be even")
	}
	mid := len(points) / 2
	return points[:mid], points[mid:], nil
}
// splitScalarVector takes a vector of scalars, splits it in half returning each half.
// The input must be non-empty and of even length.
func splitScalarVector(scalars []curves.Scalar) ([]curves.Scalar, []curves.Scalar, error) {
	if len(scalars) < 1 {
		return nil, nil, errors.New("length of scalars must be at least one")
	}
	if len(scalars)%2 != 0 {
		return nil, nil, errors.New("length of scalars must be even")
	}
	mid := len(scalars) / 2
	return scalars[:mid], scalars[mid:], nil
}
// multiplyScalarToPointVector takes a single scalar and a list of points, multiplies each point by scalar.
func multiplyScalarToPointVector(x curves.Scalar, g []curves.Point) []curves.Point {
	scaled := make([]curves.Point, len(g))
	for i := range g {
		scaled[i] = g[i].Mul(x)
	}
	return scaled
}
// multiplyScalarToScalarVector takes a single scalar (x) and a list of scalars (a), multiplies each scalar in the vector by the scalar.
func multiplyScalarToScalarVector(x curves.Scalar, a []curves.Scalar) []curves.Scalar {
	scaled := make([]curves.Scalar, len(a))
	for i := range a {
		scaled[i] = a[i].Mul(x)
	}
	return scaled
}
// multiplyPairwisePointVectors takes two lists of points (g, h) and performs a pairwise
// multiplication (group addition of corresponding elements) returning a list of points.
func multiplyPairwisePointVectors(g, h []curves.Point) ([]curves.Point, error) {
	if len(g) != len(h) {
		return nil, errors.New("length of point vectors must be the same")
	}
	combined := make([]curves.Point, len(g))
	for i := range g {
		combined[i] = g[i].Add(h[i])
	}
	return combined, nil
}
// multiplyPairwiseScalarVectors takes two lists of scalars (a, b) and performs a
// pairwise multiplication returning a list of scalars.
func multiplyPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of point vectors must be the same")
	}
	products := make([]curves.Scalar, len(a))
	for i := range a {
		products[i] = a[i].Mul(b[i])
	}
	return products, nil
}
// addPairwiseScalarVectors takes two lists of scalars (a, b) and performs a pairwise addition returning a list of scalars.
func addPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of scalar vectors must be the same")
	}
	sums := make([]curves.Scalar, len(a))
	for i := range a {
		sums[i] = a[i].Add(b[i])
	}
	return sums, nil
}
// subtractPairwiseScalarVectors takes two lists of scalars (a, b) and performs a pairwise subtraction returning a list of scalars.
func subtractPairwiseScalarVectors(a, b []curves.Scalar) ([]curves.Scalar, error) {
	if len(a) != len(b) {
		return nil, errors.New("length of scalar vectors must be the same")
	}
	differences := make([]curves.Scalar, len(a))
	for i := range a {
		differences[i] = a[i].Sub(b[i])
	}
	return differences, nil
}
// invertScalars takes a list of scalars then returns a list with each element inverted.
// Errors if any element has no inverse (i.e. is zero).
func invertScalars(xs []curves.Scalar) ([]curves.Scalar, error) {
	inverses := make([]curves.Scalar, len(xs))
	for i := range xs {
		inv, err := xs[i].Invert()
		if err != nil {
			return nil, errors.Wrap(err, "bulletproof helpers invertx")
		}
		inverses[i] = inv
	}
	return inverses, nil
}
// isPowerOfTwo returns whether a number i is a power of two or not.
// The i > 0 guard fixes the original bit trick, which incorrectly
// reported 0 (and some negative values) as powers of two.
func isPowerOfTwo(i int) bool {
	return i > 0 && i&(i-1) == 0
}
// get2nVector returns a scalar vector 2^n such that [1, 2, 4, ... 2^(n-1)]
// See k^n and 2^n definitions on pg 12 of https://eprint.iacr.org/2017/1066.pdf
// A non-positive length returns an empty vector instead of panicking on the
// vector2n[0] write.
func get2nVector(length int, curve curves.Curve) []curves.Scalar {
	if length < 1 {
		return []curves.Scalar{}
	}
	vector2n := make([]curves.Scalar, length)
	vector2n[0] = curve.Scalar.One()
	for i := 1; i < length; i++ {
		// Each entry doubles the previous: 2^i.
		vector2n[i] = vector2n[i-1].Double()
	}
	return vector2n
}
// get1nVector returns a length-sized vector whose entries are all the scalar one.
func get1nVector(length int, curve curves.Curve) []curves.Scalar {
	ones := make([]curves.Scalar, length)
	for i := range ones {
		ones[i] = curve.Scalar.One()
	}
	return ones
}
// getknVector returns the vector [1, k, k^2, ..., k^(length-1)].
// The original unconditionally wrote vectorkn[1] = k, which panicked for
// length < 2; each entry is now derived from the previous one so any
// positive length works, and non-positive lengths return an empty vector.
func getknVector(k curves.Scalar, length int, curve curves.Curve) []curves.Scalar {
	if length < 1 {
		return []curves.Scalar{}
	}
	vectorkn := make([]curves.Scalar, length)
	vectorkn[0] = curve.Scalar.One()
	for i := 1; i < length; i++ {
		// vectorkn[1] = 1*k = k, matching the original for length >= 2.
		vectorkn[i] = vectorkn[i-1].Mul(k)
	}
	return vectorkn
}

View File

@ -0,0 +1,85 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestInnerProductHappyPath checks innerProduct succeeds on equal-length vectors.
func TestInnerProductHappyPath(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(3, *curve)
	b := randScalarVec(3, *curve)
	_, err := innerProduct(a, b)
	require.NoError(t, err)
}
// TestInnerProductMismatchedLengths checks innerProduct rejects vectors of
// different lengths.
func TestInnerProductMismatchedLengths(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(3, *curve)
	b := randScalarVec(4, *curve)
	_, err := innerProduct(a, b)
	require.Error(t, err)
}
// TestInnerProductEmptyVector checks innerProduct rejects empty inputs.
func TestInnerProductEmptyVector(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(0, *curve)
	b := randScalarVec(0, *curve)
	_, err := innerProduct(a, b)
	require.Error(t, err)
}
// TestInnerProductOut cross-checks innerProduct against a manual computation
// for a length-2 input.
func TestInnerProductOut(t *testing.T) {
	curve := curves.ED25519()
	a := randScalarVec(2, *curve)
	b := randScalarVec(2, *curve)
	c, err := innerProduct(a, b)
	require.NoError(t, err)
	// Calculate manually a0*b0 + a1*b1
	cPrime := a[0].Mul(b[0]).Add(a[1].Mul(b[1]))
	require.Equal(t, c, cPrime)
}
// TestSplitListofPointsHappyPath checks an even-length vector splits into
// two equal halves.
func TestSplitListofPointsHappyPath(t *testing.T) {
	curve := curves.ED25519()
	points := randPointVec(10, *curve)
	firstHalf, secondHalf, err := splitPointVector(points)
	require.NoError(t, err)
	require.Len(t, firstHalf, 5)
	require.Len(t, secondHalf, 5)
}
// TestSplitListofPointsOddLength checks odd-length vectors are rejected.
func TestSplitListofPointsOddLength(t *testing.T) {
	curve := curves.ED25519()
	points := randPointVec(11, *curve)
	_, _, err := splitPointVector(points)
	require.Error(t, err)
}
// TestSplitListofPointsZeroLength checks empty vectors are rejected.
func TestSplitListofPointsZeroLength(t *testing.T) {
	curve := curves.ED25519()
	points := randPointVec(0, *curve)
	_, _, err := splitPointVector(points)
	require.Error(t, err)
}
// randScalarVec builds a vector of `length` scalars drawn from crypto/rand.
func randScalarVec(length int, curve curves.Curve) []curves.Scalar {
	vec := make([]curves.Scalar, length)
	for i := range vec {
		vec[i] = curve.Scalar.Random(crand.Reader)
	}
	return vec
}
// randPointVec builds a vector of `length` points drawn from crypto/rand.
func randPointVec(length int, curve curves.Curve) []curves.Point {
	vec := make([]curves.Point, length)
	for i := range vec {
		vec[i] = curve.Point.Random(crand.Reader)
	}
	return vec
}

View File

@ -0,0 +1,396 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package bulletproof implements the zero knowledge protocol bulletproofs as defined in https://eprint.iacr.org/2017/1066.pdf
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// InnerProductProver is the struct used to create InnerProductProofs
// It specifies which curve to use and holds precomputed generators
// See NewInnerProductProver() for prover initialization.
type InnerProductProver struct {
	curve      curves.Curve  // curve on which all proof scalars/points live
	generators ippGenerators // precomputed G/H generator vectors (see getGeneratorPoints)
}
// InnerProductProof contains necessary output for the inner product proof
// a and b are the final input vectors of scalars, they should be of length 1
// Ls and Rs are calculated per recursion of the IPP and are necessary for verification
// See section 3.1 on pg 15 of https://eprint.iacr.org/2017/1066.pdf
type InnerProductProof struct {
	a, b         curves.Scalar  // final collapsed scalars after all recursion levels
	capLs, capRs []curves.Point // per-recursion-level L and R commitments, in order
	curve        *curves.Curve  // curve used for (un)marshaling
}
// ippRecursion is the same as IPP but tracks recursive a', b', g', h' and Ls and Rs
// It should only be used internally by InnerProductProver.Prove()
// See L35 on pg 16 of https://eprint.iacr.org/2017/1066.pdf
type ippRecursion struct {
	a, b         []curves.Scalar    // current scalar vectors, halved each level
	c            curves.Scalar      // inner product <a, b> for the current level
	capLs, capRs []curves.Point     // L/R commitments accumulated so far
	g, h         []curves.Point     // current generator vectors, halved each level
	u, capP      curves.Point       // blinding point u and running commitment P
	transcript   *merlin.Transcript // Fiat-Shamir transcript shared across all levels
}
// NewInnerProductProver initializes a new prover
// It uses the specified domain to generate generators for vectors of at most maxVectorLength
// A prover can be used to construct inner product proofs for vectors of length less than or equal to maxVectorLength
// A prover is defined by an explicit curve.
func NewInnerProductProver(maxVectorLength int, domain []byte, curve curves.Curve) (*InnerProductProver, error) {
	gens, err := getGeneratorPoints(maxVectorLength, domain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getGenerators")
	}
	prover := &InnerProductProver{
		curve:      curve,
		generators: *gens,
	}
	return prover, nil
}
// NewInnerProductProof initializes a new InnerProductProof for a specified curve
// This should be used in tandem with UnmarshalBinary() to convert a marshaled proof into the struct.
func NewInnerProductProof(curve *curves.Curve) *InnerProductProof {
	// capLs/capRs start nil and are populated by UnmarshalBinary.
	return &InnerProductProof{
		a:     curve.NewScalar(),
		b:     curve.NewScalar(),
		capLs: nil,
		capRs: nil,
		curve: curve,
	}
}
// rangeToIPP takes the output of a range proof and converts it into an inner product proof
// See section 4.2 on pg 20
// The conversion specifies generators to use (g and hPrime), as well as the two vectors l, r of which the inner product is tHat
// Additionally, note that the P used for the IPP is in fact P*h^-mu from the range proof.
func (prover *InnerProductProver) rangeToIPP(proofG, proofH []curves.Point, l, r []curves.Scalar, tHat curves.Scalar, capPhmuinv, u curves.Point, transcript *merlin.Transcript) (*InnerProductProof, error) {
	// Note that P as a witness is only g^l * h^r
	// P needs to be in the form of g^l * h^r * u^<l,r>
	// Calculate the final P including the u^<l,r> term
	utHat := u.Mul(tHat)
	capP := capPhmuinv.Add(utHat)
	// Use params to prove inner product; the recursion starts with tHat as c
	// and empty L/R lists that fill as levels are proved.
	recursionParams := &ippRecursion{
		a:          l,
		b:          r,
		capLs:      []curves.Point{},
		capRs:      []curves.Point{},
		c:          tHat,
		g:          proofG,
		h:          proofH,
		capP:       capP,
		u:          u,
		transcript: transcript,
	}
	return prover.proveRecursive(recursionParams)
}
// getP returns the initial P value given two scalars a,b and point u
// This method should only be used for testing
// See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf
func (prover *InnerProductProver) getP(a, b []curves.Scalar, u curves.Point) (curves.Point, error) {
	// Vectors must have length power of two
	if !isPowerOfTwo(len(a)) {
		return nil, errors.New("ipp vector length must be power of two")
	}
	// Generator vectors must be same length
	if len(prover.generators.G) != len(prover.generators.H) {
		return nil, errors.New("ipp generator lengths of g and h must be equal")
	}
	// Inner product requires len(a) == len(b) else error is returned
	c, err := innerProduct(a, b)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getInnerProduct")
	}
	// Vectors must not exceed the precomputed generator count; without this
	// check the slicing below panics (mirrors the same check in Prove()).
	if len(a) > len(prover.generators.G) {
		return nil, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:len(a)]
	proofH := prover.generators.H[0:len(b)]
	// initial P = g^a * h^b * u^(a dot b) (See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf)
	ga := prover.curve.NewGeneratorPoint().SumOfProducts(proofG, a)
	hb := prover.curve.NewGeneratorPoint().SumOfProducts(proofH, b)
	uadotb := u.Mul(c)
	capP := ga.Add(hb).Add(uadotb)
	return capP, nil
}
// Prove executes the prover protocol on pg 16 of https://eprint.iacr.org/2017/1066.pdf
// It generates an inner product proof for vectors a and b, using u to blind the inner product in P
// A transcript is used for the Fiat Shamir heuristic.
func (prover *InnerProductProver) Prove(a, b []curves.Scalar, u curves.Point, transcript *merlin.Transcript) (*InnerProductProof, error) {
	// Vectors must have length power of two
	if !isPowerOfTwo(len(a)) {
		return nil, errors.New("ipp vector length must be power of two")
	}
	// Generator vectors must be same length
	if len(prover.generators.G) != len(prover.generators.H) {
		return nil, errors.New("ipp generator lengths of g and h must be equal")
	}
	// Inner product requires len(a) == len(b) else error is returned
	c, err := innerProduct(a, b)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getInnerProduct")
	}
	// Length of vectors must be less than the number of generators generated
	if len(a) > len(prover.generators.G) {
		return nil, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:len(a)]
	proofH := prover.generators.H[0:len(b)]
	// initial P = g^a * h^b * u^(a dot b) (See (3) on page 13 of https://eprint.iacr.org/2017/1066.pdf)
	ga := prover.curve.NewGeneratorPoint().SumOfProducts(proofG, a)
	hb := prover.curve.NewGeneratorPoint().SumOfProducts(proofH, b)
	uadotb := u.Mul(c)
	capP := ga.Add(hb).Add(uadotb)
	// Seed the recursion with the full-length vectors and empty L/R lists.
	recursionParams := &ippRecursion{
		a:          a,
		b:          b,
		capLs:      []curves.Point{},
		capRs:      []curves.Point{},
		c:          c,
		g:          proofG,
		h:          proofH,
		capP:       capP,
		u:          u,
		transcript: transcript,
	}
	return prover.proveRecursive(recursionParams)
}
// proveRecursive executes the recursion on pg 16 of https://eprint.iacr.org/2017/1066.pdf
// Each level halves the vectors (and generators) until length 1, emitting one
// (L, R) commitment pair per level; the transcript supplies the per-level
// Fiat-Shamir challenge x.
func (prover *InnerProductProver) proveRecursive(recursionParams *ippRecursion) (*InnerProductProof, error) {
	// length checks
	if len(recursionParams.a) != len(recursionParams.b) {
		return nil, errors.New("ipp proveRecursive a and b different lengths")
	}
	if len(recursionParams.g) != len(recursionParams.h) {
		return nil, errors.New("ipp proveRecursive g and h different lengths")
	}
	if len(recursionParams.a) != len(recursionParams.g) {
		return nil, errors.New("ipp proveRecursive scalar and point vectors different lengths")
	}
	// Base case (L14, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	if len(recursionParams.a) == 1 {
		proof := &InnerProductProof{
			a:     recursionParams.a[0],
			b:     recursionParams.b[0],
			capLs: recursionParams.capLs,
			capRs: recursionParams.capRs,
			curve: &prover.curve,
		}
		return proof, nil
	}
	// Split current state into low (first half) vs high (second half) vectors
	aLo, aHi, err := splitScalarVector(recursionParams.a)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitScalarVector")
	}
	bLo, bHi, err := splitScalarVector(recursionParams.b)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitScalarVector")
	}
	gLo, gHi, err := splitPointVector(recursionParams.g)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitPointVector")
	}
	hLo, hHi, err := splitPointVector(recursionParams.h)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams splitPointVector")
	}
	// c_l, c_r (L21,22, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	cL, err := innerProduct(aLo, bHi)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	cR, err := innerProduct(aHi, bLo)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	// L, R (L23,24, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	lga := prover.curve.Point.SumOfProducts(gHi, aLo)
	lhb := prover.curve.Point.SumOfProducts(hLo, bHi)
	ucL := recursionParams.u.Mul(cL)
	capL := lga.Add(lhb).Add(ucL)
	rga := prover.curve.Point.SumOfProducts(gLo, aHi)
	rhb := prover.curve.Point.SumOfProducts(hHi, bLo)
	ucR := recursionParams.u.Mul(cR)
	capR := rga.Add(rhb).Add(ucR)
	// Add L,R for verifier to use to calculate final g, h
	newL := recursionParams.capLs
	newL = append(newL, capL)
	newR := recursionParams.capRs
	newR = append(newR, capR)
	// Get x from L, R for non-interactive (See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf)
	// Note this replaces the interactive model, i.e. L36-28 of pg16 of https://eprint.iacr.org/2017/1066.pdf
	x, err := prover.calcx(capL, capR, recursionParams.transcript)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams calcx")
	}
	// Calculate recursive inputs
	xInv, err := x.Invert()
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams x.Invert")
	}
	// g', h' (L29,30, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	gLoxInverse := multiplyScalarToPointVector(xInv, gLo)
	gHix := multiplyScalarToPointVector(x, gHi)
	gPrime, err := multiplyPairwisePointVectors(gLoxInverse, gHix)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams multiplyPairwisePointVectors")
	}
	hLox := multiplyScalarToPointVector(x, hLo)
	hHixInv := multiplyScalarToPointVector(xInv, hHi)
	hPrime, err := multiplyPairwisePointVectors(hLox, hHixInv)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams multiplyPairwisePointVectors")
	}
	// P' (L31, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	xSquare := x.Square()
	xInvSquare := xInv.Square()
	LxSquare := capL.Mul(xSquare)
	RxInvSquare := capR.Mul(xInvSquare)
	PPrime := LxSquare.Add(recursionParams.capP).Add(RxInvSquare)
	// a', b' (L33, 34, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	aLox := multiplyScalarToScalarVector(x, aLo)
	aHixIn := multiplyScalarToScalarVector(xInv, aHi)
	aPrime, err := addPairwiseScalarVectors(aLox, aHixIn)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams addPairwiseScalarVectors")
	}
	bLoxInv := multiplyScalarToScalarVector(xInv, bLo)
	bHix := multiplyScalarToScalarVector(x, bHi)
	bPrime, err := addPairwiseScalarVectors(bLoxInv, bHix)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams addPairwiseScalarVectors")
	}
	// c'
	cPrime, err := innerProduct(aPrime, bPrime)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams innerProduct")
	}
	// Make recursive call (L35, pg16 of https://eprint.iacr.org/2017/1066.pdf)
	recursiveIPP := &ippRecursion{
		a:          aPrime,
		b:          bPrime,
		capLs:      newL,
		capRs:      newR,
		c:          cPrime,
		g:          gPrime,
		h:          hPrime,
		capP:       PPrime,
		u:          recursionParams.u,
		transcript: recursionParams.transcript,
	}
	out, err := prover.proveRecursive(recursiveIPP)
	if err != nil {
		return nil, errors.Wrap(err, "recursionParams proveRecursive")
	}
	return out, nil
}
// calcx uses a merlin transcript for Fiat Shamir
// For each recursion, it takes the current state of the transcript and appends the newly calculated L and R values
// A new scalar is then read from the transcript
// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf
func (prover *InnerProductProver) calcx(capL, capR curves.Point, transcript *merlin.Transcript) (curves.Scalar, error) {
	// Bind the newest (L, R) pair into the transcript state.
	transcript.AppendMessage([]byte("addRecursiveL"), capL.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addRecursiveR"), capR.ToAffineUncompressed())
	// Extract 64 bytes and reduce them into a challenge scalar.
	wide := transcript.ExtractBytes([]byte("getx"), 64)
	challenge, err := prover.curve.NewScalar().SetBytesWide(wide)
	if err != nil {
		return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide")
	}
	return challenge, nil
}
// MarshalBinary takes an inner product proof and marshals into bytes:
// scalar a, scalar b, then interleaved compressed (L, R) point pairs.
func (proof *InnerProductProof) MarshalBinary() []byte {
	var out []byte
	out = append(out, proof.a.Bytes()...)
	out = append(out, proof.b.Bytes()...)
	// capLs and capRs always have equal length by construction.
	for i := range proof.capLs {
		out = append(out, proof.capLs[i].ToAffineCompressed()...)
		out = append(out, proof.capRs[i].ToAffineCompressed()...)
	}
	return out
}
// UnmarshalBinary takes bytes of a marshaled proof and writes them into an inner product proof
// The inner product proof used should be from the output of NewInnerProductProof().
// Truncated or mis-sized input now yields an error rather than a slice
// bounds panic.
func (proof *InnerProductProof) UnmarshalBinary(data []byte) error {
	scalarLen := len(proof.curve.NewScalar().Bytes())
	pointLen := len(proof.curve.NewGeneratorPoint().ToAffineCompressed())
	// Bounds check: the data must hold the two leading scalars ...
	if len(data) < 2*scalarLen {
		return errors.New("innerProductProof UnmarshalBinary data too short")
	}
	// ... followed by a whole number of compressed (L, R) point pairs.
	if (len(data)-2*scalarLen)%(2*pointLen) != 0 {
		return errors.New("innerProductProof UnmarshalBinary invalid data length")
	}
	ptr := 0
	// Get scalars
	a, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("innerProductProof UnmarshalBinary SetBytes")
	}
	proof.a = a
	ptr += scalarLen
	b, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("innerProductProof UnmarshalBinary SetBytes")
	}
	proof.b = b
	ptr += scalarLen
	// Get points
	var capLs, capRs []curves.Point //nolint:prealloc // pointer arithmetic makes it too unreadable.
	for ptr < len(data) {
		capLElem, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
		if err != nil {
			return errors.New("innerProductProof UnmarshalBinary FromAffineCompressed")
		}
		capLs = append(capLs, capLElem)
		ptr += pointLen
		capRElem, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
		if err != nil {
			return errors.New("innerProductProof UnmarshalBinary FromAffineCompressed")
		}
		capRs = append(capRs, capRElem)
		ptr += pointLen
	}
	proof.capLs = capLs
	proof.capRs = capRs
	return nil
}

View File

@ -0,0 +1,99 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestIPPHappyPath proves over length-8 vectors and expects log2(8) = 3
// recursion levels, hence 3 L and 3 R commitments.
func TestIPPHappyPath(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	a := randScalarVec(8, *curve)
	b := randScalarVec(8, *curve)
	u := curve.Point.Random(crand.Reader)
	transcript := merlin.NewTranscript("test")
	proof, err := prover.Prove(a, b, u, transcript)
	require.NoError(t, err)
	require.Equal(t, 3, len(proof.capLs))
	require.Equal(t, 3, len(proof.capRs))
}
// TestIPPMismatchedVectors checks Prove rejects vectors of different lengths.
func TestIPPMismatchedVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	a := randScalarVec(4, *curve)
	b := randScalarVec(8, *curve)
	u := curve.Point.Random(crand.Reader)
	transcript := merlin.NewTranscript("test")
	_, err = prover.Prove(a, b, u, transcript)
	require.Error(t, err)
}
// TestIPPNonPowerOfTwoLengthVectors checks Prove rejects non-power-of-two lengths.
func TestIPPNonPowerOfTwoLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	a := randScalarVec(3, *curve)
	b := randScalarVec(3, *curve)
	u := curve.Point.Random(crand.Reader)
	transcript := merlin.NewTranscript("test")
	_, err = prover.Prove(a, b, u, transcript)
	require.Error(t, err)
}
// TestIPPZeroLengthVectors checks Prove rejects empty vectors.
func TestIPPZeroLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	a := randScalarVec(0, *curve)
	b := randScalarVec(0, *curve)
	u := curve.Point.Random(crand.Reader)
	transcript := merlin.NewTranscript("test")
	_, err = prover.Prove(a, b, u, transcript)
	require.Error(t, err)
}
// TestIPPGreaterThanMaxLengthVectors checks Prove rejects vectors longer than
// the prover's precomputed generator capacity.
func TestIPPGreaterThanMaxLengthVectors(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)
	a := randScalarVec(16, *curve)
	b := randScalarVec(16, *curve)
	u := curve.Point.Random(crand.Reader)
	transcript := merlin.NewTranscript("test")
	_, err = prover.Prove(a, b, u, transcript)
	require.Error(t, err)
}
// TestIPPMarshal round-trips a proof through MarshalBinary/UnmarshalBinary
// and confirms every field of the decoded proof matches the original.
func TestIPPMarshal(t *testing.T) {
	curve := curves.ED25519()
	prover, err := NewInnerProductProver(8, []byte("test"), *curve)
	require.NoError(t, err)

	proof, err := prover.Prove(
		randScalarVec(8, *curve),
		randScalarVec(8, *curve),
		curve.Point.Random(crand.Reader),
		merlin.NewTranscript("test"),
	)
	require.NoError(t, err)

	decoded := NewInnerProductProof(curve)
	require.NoError(t, decoded.UnmarshalBinary(proof.MarshalBinary()))

	require.Zero(t, proof.a.Cmp(decoded.a))
	require.Zero(t, proof.b.Cmp(decoded.b))
	for i := range proof.capLs {
		require.True(t, proof.capLs[i].Equal(decoded.capLs[i]))
		require.True(t, proof.capRs[i].Equal(decoded.capRs[i]))
	}
}

View File

@ -0,0 +1,209 @@
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// InnerProductVerifier is the struct used to verify inner product proofs
// It specifies which curve to use and holds precomputed generators
// See NewInnerProductProver() for prover initialization.
type InnerProductVerifier struct {
	curve      curves.Curve  // curve over which all point/scalar arithmetic is performed
	generators ippGenerators // precomputed generator vectors G, H; must match the prover's domain
}
// NewInnerProductVerifier initializes a new verifier
// It uses the specified domain to generate generators for vectors of at most maxVectorLength
// A verifier can be used to verify inner product proofs for vectors of length less than or equal to maxVectorLength
// A verifier is defined by an explicit curve.
func NewInnerProductVerifier(maxVectorLength int, domain []byte, curve curves.Curve) (*InnerProductVerifier, error) {
	gens, err := getGeneratorPoints(maxVectorLength, domain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "ipp getGenerators")
	}
	verifier := &InnerProductVerifier{
		curve:      curve,
		generators: *gens,
	}
	return verifier, nil
}
// Verify verifies the given proof inputs
// It implements the final comparison of section 3.1 on pg17 of https://eprint.iacr.org/2017/1066.pdf
// capP is the commitment being checked, u the auxiliary generator; the
// transcript must be in the same state the prover's transcript was in before
// proving, or the Fiat-Shamir challenges will not match.
func (verifier *InnerProductVerifier) Verify(capP, u curves.Point, proof *InnerProductProof, transcript *merlin.Transcript) (bool, error) {
	if len(proof.capLs) != len(proof.capRs) {
		return false, errors.New("ipp capLs and capRs must be same length")
	}
	// Generator vectors must be same length
	if len(verifier.generators.G) != len(verifier.generators.H) {
		return false, errors.New("ipp generator lengths of g and h must be equal")
	}
	// Get generators for each elem in a, b and one more for u
	// len(Ls) = log n, therefore can just exponentiate
	n := 1 << len(proof.capLs)
	// Length of vectors must be at most the number of generators generated.
	// Note the guard is strict-greater-than, so n == maxVectorLength is valid;
	// the error message reflects that.
	if n > len(verifier.generators.G) {
		return false, errors.New("ipp vector length must be less than or equal to maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := verifier.generators.G[0:n]
	proofH := verifier.generators.H[0:n]
	// Replay the Fiat-Shamir challenges x_j from the transcript.
	xs, err := getxs(transcript, proof.capLs, proof.capRs, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "verifier getxs")
	}
	// s vector via the O(n) algorithm (see getsNew).
	s, err := verifier.getsNew(xs, n)
	if err != nil {
		return false, errors.Wrap(err, "verifier getss")
	}
	lhs, err := verifier.getLHS(u, proof, proofG, proofH, s)
	if err != nil {
		return false, errors.Wrap(err, "verify getLHS")
	}
	rhs, err := verifier.getRHS(capP, proof, xs)
	if err != nil {
		return false, errors.Wrap(err, "verify getRHS")
	}
	return lhs.Equal(rhs), nil
}
// VerifyFromRangeProof verifies an inner product proof as invoked from range
// proof verification: capP is reconstructed as capPhmuinv * u^tHat rather than
// passed in directly.
// It implements the final comparison of section 3.1 on pg17 of https://eprint.iacr.org/2017/1066.pdf
// NOTE(review): unlike Verify, this path performs no length/bounds checks on
// the proof or generators, and uses the O(nlogn) gets() rather than getsNew()
// — presumably intentional, but worth confirming.
func (verifier *InnerProductVerifier) VerifyFromRangeProof(proofG, proofH []curves.Point, capPhmuinv, u curves.Point, tHat curves.Scalar, proof *InnerProductProof, transcript *merlin.Transcript) (bool, error) {
	// Get generators for each elem in a, b and one more for u
	// len(Ls) = log n, therefore can just exponentiate
	n := 1 << len(proof.capLs)
	// Replay the Fiat-Shamir challenges x_j from the transcript.
	xs, err := getxs(transcript, proof.capLs, proof.capRs, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "verifier getxs")
	}
	s, err := verifier.gets(xs, n)
	if err != nil {
		return false, errors.Wrap(err, "verifier getss")
	}
	lhs, err := verifier.getLHS(u, proof, proofG, proofH, s)
	if err != nil {
		return false, errors.Wrap(err, "verify getLHS")
	}
	// Rebuild capP = capPhmuinv * u^tHat (see section 4.2 of the paper).
	utHat := u.Mul(tHat)
	capP := capPhmuinv.Add(utHat)
	rhs, err := verifier.getRHS(capP, proof, xs)
	if err != nil {
		return false, errors.Wrap(err, "verify getRHS")
	}
	return lhs.Equal(rhs), nil
}
// getRHS gets the right hand side of the final comparison of section 3.1 on pg17:
// capP * prod_j( L_j^(x_j^2) * R_j^(x_j^-2) ).
func (*InnerProductVerifier) getRHS(capP curves.Point, proof *InnerProductProof, xs []curves.Scalar) (curves.Point, error) {
	result := capP
	for j := range proof.capLs {
		xSquare := xs[j].Square()
		xSquareInv, err := xSquare.Invert()
		if err != nil {
			return nil, errors.Wrap(err, "verify invert")
		}
		result = result.Add(proof.capLs[j].Mul(xSquare)).Add(proof.capRs[j].Mul(xSquareInv))
	}
	return result, nil
}
// getLHS gets the left hand side of the final comparison of section 3.1 on pg17:
// g^(a*s) * h^(b*s^-1) * u^(a*b), where a and b are the collapsed witness
// scalars carried in the proof and s is the verification vector from gets/getsNew.
func (verifier *InnerProductVerifier) getLHS(u curves.Point, proof *InnerProductProof, g, h []curves.Point, s []curves.Scalar) (curves.Point, error) {
	// Elementwise inversion of s, needed for the h term.
	sInv, err := invertScalars(s)
	if err != nil {
		return nil, errors.Wrap(err, "verify invertScalars")
	}
	// g^(a*s)
	as := multiplyScalarToScalarVector(proof.a, s)
	gas := verifier.curve.Point.SumOfProducts(g, as)
	// h^(b*s^-1)
	bsInv := multiplyScalarToScalarVector(proof.b, sInv)
	hbsInv := verifier.curve.Point.SumOfProducts(h, bsInv)
	// u^a*b
	ab := proof.a.Mul(proof.b)
	uab := u.Mul(ab)
	// g^(a*s) * h^(b*s^-1) * u^a*b
	out := gas.Add(hbsInv).Add(uab)
	return out, nil
}
// getxs calculates the x values from Ls and Rs
// Note that each x is read from the transcript, then the L and R at a certain index are written to the transcript
// This mirrors the reading of xs and writing of Ls and Rs in the prover.
// The append/extract order here is part of the Fiat-Shamir contract: any
// reordering would desynchronize verifier challenges from the prover's.
func getxs(transcript *merlin.Transcript, capLs, capRs []curves.Point, curve curves.Curve) ([]curves.Scalar, error) {
	xs := make([]curves.Scalar, len(capLs))
	for i, capLi := range capLs {
		capRi := capRs[i]
		// Add the newest L and R values to transcript
		transcript.AppendMessage([]byte("addRecursiveL"), capLi.ToAffineUncompressed())
		transcript.AppendMessage([]byte("addRecursiveR"), capRi.ToAffineUncompressed())
		// Read 64 bytes from, set to scalar
		// (wide reduction of 64 bytes keeps the challenge statistically uniform)
		outBytes := transcript.ExtractBytes([]byte("getx"), 64)
		x, err := curve.NewScalar().SetBytesWide(outBytes)
		if err != nil {
			return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide")
		}
		xs[i] = x
	}
	return xs, nil
}
// gets calculates the vector s of values used for verification
// See the second expression of section 3.1 on pg15
// Each s_i is the product over j of x_j or x_j^-1, selected by the j-th bit
// (most significant first) of the index i — O(nlogn) scalar multiplications.
//nolint
func (verifier *InnerProductVerifier) gets(xs []curves.Scalar, n int) ([]curves.Scalar, error) {
	ss := make([]curves.Scalar, n)
	for i := 0; i < n; i++ {
		si := verifier.curve.Scalar.One()
		for j, xj := range xs {
			// Bit j of i (MSB-first) selects x_j; a zero bit selects x_j^-1.
			if i>>(len(xs)-j-1)&0x01 == 1 {
				si = si.Mul(xj)
			} else {
				xjInverse, err := xj.Invert()
				if err != nil {
					return nil, errors.Wrap(err, "getss invert")
				}
				si = si.Mul(xjInverse)
			}
		}
		ss[i] = si
	}
	return ss, nil
}
// getsNew calculates the vector s of values used for verification
// It provides analogous functionality as gets(), but uses a O(n) algorithm vs O(nlogn)
// The algorithm inverts all xs, then begins multiplying the inversion by the square of x elements to
// calculate all s values thus minimizing necessary inversions/ computation.
// Concretely: ss[0] = (prod_j x_j)^-1 = every index with all-zero bits; each
// subsequent pass over stride 2^(len(xs)-j) fills the entries whose j-th bit
// flips from 0 to 1 by multiplying the already-computed entry by x_j^2
// (which converts the x_j^-1 factor into x_j).
func (verifier *InnerProductVerifier) getsNew(xs []curves.Scalar, n int) ([]curves.Scalar, error) {
	var err error
	ss := make([]curves.Scalar, n)
	// First element is all xs inverted mul'd
	ss[0] = verifier.curve.Scalar.One()
	for _, xj := range xs {
		ss[0] = ss[0].Mul(xj)
	}
	ss[0], err = ss[0].Invert()
	if err != nil {
		return nil, errors.Wrap(err, "ipp gets inv ss0")
	}
	for j, xj := range xs {
		xjSquared := xj.Square()
		// Stride halves each round: entry i+2^(len(xs)-j-1) differs from
		// entry i only in bit j, so it equals ss[i] * x_j^2.
		for i := 0; i < n; i += 1 << (len(xs) - j) {
			ss[i+1<<(len(xs)-j-1)] = ss[i].Mul(xjSquared)
		}
	}
	return ss, nil
}

View File

@ -0,0 +1,79 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestIPPVerifyHappyPath proves and then verifies an inner product proof over
// 256-element vectors, expecting verification to succeed.
func TestIPPVerifyHappyPath(t *testing.T) {
	curve := curves.ED25519()
	const vecLength = 256
	prover, err := NewInnerProductProver(vecLength, []byte("test"), *curve)
	require.NoError(t, err)

	a := randScalarVec(vecLength, *curve)
	b := randScalarVec(vecLength, *curve)
	u := curve.Point.Random(crand.Reader)

	proof, err := prover.Prove(a, b, u, merlin.NewTranscript("test"))
	require.NoError(t, err)

	capP, err := prover.getP(a, b, u)
	require.NoError(t, err)

	verifier, err := NewInnerProductVerifier(vecLength, []byte("test"), *curve)
	require.NoError(t, err)
	ok, err := verifier.Verify(capP, u, proof, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.True(t, ok)
}
// BenchmarkIPPVerification measures verification time for a 1024-element
// inner product proof. The original version ran verification exactly once and
// never iterated over bench.N, so the reported ns/op measured nothing; the
// loop below fixes that, and ResetTimer excludes the (expensive) proving setup
// from the timed region.
func BenchmarkIPPVerification(bench *testing.B) {
	curve := curves.ED25519()
	vecLength := 1024
	prover, _ := NewInnerProductProver(vecLength, []byte("test"), *curve)
	a := randScalarVec(vecLength, *curve)
	b := randScalarVec(vecLength, *curve)
	u := curve.Point.Random(crand.Reader)
	transcriptProver := merlin.NewTranscript("test")
	proof, _ := prover.Prove(a, b, u, transcriptProver)
	verifier, _ := NewInnerProductVerifier(vecLength, []byte("test"), *curve)
	capP, _ := prover.getP(a, b, u)

	bench.ResetTimer()
	for i := 0; i < bench.N; i++ {
		// Verification replays the Fiat-Shamir transcript, so each iteration
		// needs a fresh transcript in the pre-proof state.
		transcriptVerifier := merlin.NewTranscript("test")
		verified, _ := verifier.Verify(capP, u, proof, transcriptVerifier)
		require.True(bench, verified)
	}
}
// TestIPPVerifyInvalidProof checks that a proof built over one witness fails
// to verify against the commitment of a different, unrelated witness.
func TestIPPVerifyInvalidProof(t *testing.T) {
	curve := curves.ED25519()
	const vecLength = 64
	prover, err := NewInnerProductProver(vecLength, []byte("test"), *curve)
	require.NoError(t, err)

	a := randScalarVec(vecLength, *curve)
	b := randScalarVec(vecLength, *curve)
	u := curve.Point.Random(crand.Reader)

	// Build a proof over a completely unrelated witness.
	aPrime := randScalarVec(64, *curve)
	bPrime := randScalarVec(64, *curve)
	uPrime := curve.Point.Random(crand.Reader)
	proofPrime, err := prover.Prove(aPrime, bPrime, uPrime, merlin.NewTranscript("test"))
	require.NoError(t, err)

	verifier, err := NewInnerProductVerifier(vecLength, []byte("test"), *curve)
	require.NoError(t, err)
	capP, err := prover.getP(a, b, u)
	require.NoError(t, err)

	// The mismatched capP and u must cause verification to fail cleanly.
	verified, err := verifier.Verify(capP, u, proofPrime, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.False(t, verified)
}

View File

@ -0,0 +1,348 @@
package bulletproof
import (
crand "crypto/rand"
"github.com/gtank/merlin"
"github.com/pkg/errors"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// BatchProve proves that a list of scalars v are in the range n.
// It implements the aggregating logarithmic proofs defined on pg21.
// Instead of taking a single value and a single blinding factor, BatchProve takes in a list of values and list of
// blinding factors.
// gamma[i] is the blinding factor for v[i]; len(gamma) must equal len(v)
// (getcapVBatched and the tau_x sum both index gamma by secret position).
// The transcript must match the verifier's initial transcript state.
func (prover *RangeProver) BatchProve(v, gamma []curves.Scalar, n int, proofGenerators RangeProofGenerators, transcript *merlin.Transcript) (*RangeProof, error) {
	// Define nm as the total bits required for secrets, calculated as number of secrets * n
	m := len(v)
	nm := n * m
	// nm must be less than or equal to the number of generators generated
	if nm > len(prover.generators.G) {
		return nil, errors.New("ipp vector length must be less than or equal to maxVectorLength")
	}
	// In case where nm is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:nm]
	proofH := prover.generators.H[0:nm]
	// Check that each elem in v is in range [0, 2^n]
	for _, vi := range v {
		checkedRange := checkRange(vi, n)
		if checkedRange != nil {
			return nil, checkedRange
		}
	}
	// L40 on pg19: aL is the concatenated bit decomposition of all secrets
	aL, err := getaLBatched(v, n, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	onenm := get1nVector(nm, prover.curve)
	// L41 on pg19: aR = aL - 1^nm (bitwise complement in scalar form)
	aR, err := subtractPairwiseScalarVectors(aL, onenm)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	alpha := prover.curve.Scalar.Random(crand.Reader)
	// Calc A (L44, pg19): commitment to aL, aR under blinding alpha
	halpha := proofGenerators.h.Mul(alpha)
	gaL := prover.curve.Point.SumOfProducts(proofG, aL)
	haR := prover.curve.Point.SumOfProducts(proofH, aR)
	capA := halpha.Add(gaL).Add(haR)
	// L45, 46, pg19: random blinding vectors for the polynomial commitment
	sL := getBlindingVector(nm, prover.curve)
	sR := getBlindingVector(nm, prover.curve)
	rho := prover.curve.Scalar.Random(crand.Reader)
	// Calc S (L47, pg19): commitment to sL, sR under blinding rho
	hrho := proofGenerators.h.Mul(rho)
	gsL := prover.curve.Point.SumOfProducts(proofG, sL)
	hsR := prover.curve.Point.SumOfProducts(proofH, sR)
	capS := hrho.Add(gsL).Add(hsR)
	// Fiat Shamir for y,z (L49, pg19)
	capV := getcapVBatched(v, gamma, proofGenerators.g, proofGenerators.h)
	y, z, err := calcyzBatched(capV, capA, capS, transcript, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc t_1, t_2
	// See the l(X), r(X), equations on pg 21
	// Use l(X)'s and r(X)'s constant and linear terms to derive t_1 and t_2
	// (a_l - z*1^n)
	zonenm := multiplyScalarToScalarVector(z, onenm)
	constantTerml, err := subtractPairwiseScalarVectors(aL, zonenm)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	linearTerml := sL
	// zSum term, see equation 71 on pg21
	zSum := getSumTermrXBatched(z, n, len(v), prover.curve)
	// a_r + z*1^nm
	aRPluszonenm, err := addPairwiseScalarVectors(aR, zonenm)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	ynm := getknVector(y, nm, prover.curve)
	// Hadamard product y^nm o (a_r + z*1^nm)
	hadamard, err := multiplyPairwiseScalarVectors(ynm, aRPluszonenm)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	constantTermr, err := addPairwiseScalarVectors(hadamard, zSum)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	linearTermr, err := multiplyPairwiseScalarVectors(ynm, sR)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// t_1 (as the linear coefficient) is the sum of the dot products of l(X)'s linear term dot r(X)'s constant term
	// and r(X)'s linear term dot l(X)'s constant term
	t1FirstTerm, err := innerProduct(linearTerml, constantTermr)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t1SecondTerm, err := innerProduct(linearTermr, constantTerml)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t1 := t1FirstTerm.Add(t1SecondTerm)
	// t_2 (as the quadratic coefficient) is the dot product of l(X)'s and r(X)'s linear terms
	t2, err := innerProduct(linearTerml, linearTermr)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// L52, pg20: blinding factors for the T_1, T_2 commitments
	tau1 := prover.curve.Scalar.Random(crand.Reader)
	tau2 := prover.curve.Scalar.Random(crand.Reader)
	// T_1, T_2 (L53, pg20)
	capT1 := proofGenerators.g.Mul(t1).Add(proofGenerators.h.Mul(tau1))
	capT2 := proofGenerators.g.Mul(t2).Add(proofGenerators.h.Mul(tau2))
	// Fiat shamir for x (L55, pg20)
	x, err := calcx(capT1, capT2, transcript, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc l
	// Instead of using the expression in the line, evaluate l() at x
	sLx := multiplyScalarToScalarVector(x, linearTerml)
	l, err := addPairwiseScalarVectors(constantTerml, sLx)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc r
	// Instead of using the expression in the line, evaluate r() at x
	ynsRx := multiplyScalarToScalarVector(x, linearTermr)
	r, err := addPairwiseScalarVectors(constantTermr, ynsRx)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc t hat
	// For efficiency, instead of calculating the dot product, evaluate t() at x
	zm := getknVector(z, m, prover.curve)
	zsquarezm := multiplyScalarToScalarVector(z.Square(), zm)
	sumv := prover.curve.Scalar.Zero()
	for i := 0; i < m; i++ {
		elem := zsquarezm[i].Mul(v[i])
		sumv = sumv.Add(elem)
	}
	deltayzBatched, err := deltayzBatched(y, z, n, m, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// tHat = t0 + t1*x + t2*x^2 where t0 = sum(z^(2+i) * v_i) + delta(y,z)
	t0 := sumv.Add(deltayzBatched)
	tLinear := t1.Mul(x)
	tQuadratic := t2.Mul(x.Square())
	tHat := t0.Add(tLinear).Add(tQuadratic)
	// Calc tau_x (L61, pg20)
	tau2xsquare := tau2.Mul(x.Square())
	tau1x := tau1.Mul(x)
	zsum := prover.curve.Scalar.Zero()
	zExp := z.Clone()
	// sum of z^(1+j) * gamma_j for j = 1..m
	for j := 1; j < m+1; j++ {
		zExp = zExp.Mul(z)
		zsum = zsum.Add(zExp.Mul(gamma[j-1]))
	}
	taux := tau2xsquare.Add(tau1x).Add(zsum)
	// Calc mu (L62, pg20)
	mu := alpha.Add(rho.Mul(x))
	// Calc IPP (See section 4.2): switch to generators h' = h^(y^-nm)
	hPrime, err := gethPrime(proofH, y, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// P is redefined in batched case, see bottom equation on pg21.
	capPhmu := getPhmuBatched(proofG, hPrime, proofGenerators.h, capA, capS, x, y, z, mu, n, m, prover.curve)
	// Fiat-Shamir challenge w scales u before compressing l, r via the IPP.
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := prover.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	ipp, err := prover.ippProver.rangeToIPP(proofG, hPrime, l, r, tHat, capPhmu, proofGenerators.u.Mul(w), transcript)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	out := &RangeProof{
		capA:  capA,
		capS:  capS,
		capT1: capT1,
		capT2: capT2,
		taux:  taux,
		mu:    mu,
		tHat:  tHat,
		ipp:   ipp,
		curve: &prover.curve,
	}
	return out, nil
}
// See final term of L71 on pg 21
// Sigma_{j=1}^{m} z^{1+j} * (0^{(j-1)*n} || 2^{n} || 0^{(m-j)*n}).
// Returned as a single flattened nm-length vector: segment j holds z^{1+j}*2^n.
func getSumTermrXBatched(z curves.Scalar, n, m int, curve curves.Curve) []curves.Scalar {
	twoN := get2nVector(n, curve)
	var out []curves.Scalar
	// The final power should be one more than m
	// (zExp starts at z and is multiplied once per iteration: z^2 .. z^(m+1))
	zExp := z.Clone()
	for j := 0; j < m; j++ {
		zExp = zExp.Mul(z)
		elem := multiplyScalarToScalarVector(zExp, twoN)
		out = append(out, elem...)
	}
	return out
}
// getcapVBatched builds the per-secret Pedersen commitments V_i = g^(v_i) * h^(gamma_i).
func getcapVBatched(v, gamma []curves.Scalar, g, h curves.Point) []curves.Point {
	commitments := make([]curves.Point, len(v))
	for i := range v {
		commitments[i] = getcapV(v[i], gamma[i], g, h)
	}
	return commitments
}
// getaLBatched concatenates the n-bit decompositions of every secret in v
// into one flat nm-length scalar vector.
func getaLBatched(v []curves.Scalar, n int, curve curves.Curve) ([]curves.Scalar, error) {
	var aL []curves.Scalar
	for _, secret := range v {
		bits, err := getaL(secret, n, curve)
		if err != nil {
			return nil, err
		}
		aL = append(aL, bits...)
	}
	return aL, nil
}
// calcyzBatched derives the Fiat-Shamir challenges y and z for the batched
// range proof. The commitments V_i, A and S are appended to the transcript in
// that fixed order before extracting; prover and verifier must call this with
// identical transcript states or challenges will diverge.
func calcyzBatched(capV []curves.Point, capA, capS curves.Point, transcript *merlin.Transcript, curve curves.Curve) (curves.Scalar, curves.Scalar, error) {
	// Add the A,S values to transcript
	for _, capVi := range capV {
		transcript.AppendMessage([]byte("addV"), capVi.ToAffineUncompressed())
	}
	transcript.AppendMessage([]byte("addcapA"), capA.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addcapS"), capS.ToAffineUncompressed())
	// Read 64 bytes twice from, set to scalar for y and z
	// (64-byte wide reduction keeps each challenge statistically uniform)
	yBytes := transcript.ExtractBytes([]byte("gety"), 64)
	y, err := curve.NewScalar().SetBytesWide(yBytes)
	if err != nil {
		return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide")
	}
	zBytes := transcript.ExtractBytes([]byte("getz"), 64)
	z, err := curve.NewScalar().SetBytesWide(zBytes)
	if err != nil {
		return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide")
	}
	return y, z, nil
}
// deltayzBatched computes delta(y,z) for the batched case (redefined on pg21):
// (z - z^2)*<1^nm, y^nm> - Sigma_{j=1}^{m} z^{2+j} * <1^n, 2^n>.
func deltayzBatched(y, z curves.Scalar, n, m int, curve curves.Curve) (curves.Scalar, error) {
	// z - z^2
	zMinuszsquare := z.Sub(z.Square())
	// 1^(n*m)
	onenm := get1nVector(n*m, curve)
	// <1^nm, y^nm>
	onenmdotynm, err := innerProduct(onenm, getknVector(y, n*m, curve))
	if err != nil {
		return nil, errors.Wrap(err, "deltayz")
	}
	// (z - z^2)*<1^nm, y^nm>
	termFirst := zMinuszsquare.Mul(onenmdotynm)
	// <1^n, 2^n>
	onendottwon, err := innerProduct(get1nVector(n, curve), get2nVector(n, curve))
	if err != nil {
		return nil, errors.Wrap(err, "deltayz")
	}
	// Sum z^(2+j)*<1^n, 2^n> for j = 1..m (zExp starts at z^2, multiplied
	// once per iteration, giving powers z^3 .. z^(m+2))
	termSecond := curve.Scalar.Zero()
	zExp := z.Square()
	for j := 1; j < m+1; j++ {
		zExp = zExp.Mul(z)
		elem := zExp.Mul(onendottwon)
		termSecond = termSecond.Add(elem)
	}
	// (z - z^2)*<1^nm, y^nm> - Sigma z^(2+j)*<1^n, 2^n>
	out := termFirst.Sub(termSecond)
	return out, nil
}
// Bottom equation on pg21.
// getPhmuBatched reconstructs P * h^-mu for the batched inner product
// argument: P = A * S^x * g^-z * h'^(z*y^nm) * prod_j h'_[j]^(z^(1+j)*2^n).
func getPhmuBatched(proofG, proofHPrime []curves.Point, h, capA, capS curves.Point, x, y, z, mu curves.Scalar, n, m int, curve curves.Curve) curves.Point {
	twoN := get2nVector(n, curve)
	// h'^(z*y^n + z^2*2^n)
	lastElem := curve.NewIdentityPoint()
	zExp := z.Clone()
	for j := 1; j < m+1; j++ {
		// Get subvector of h covering secret j's n bits
		hSubvector := proofHPrime[(j-1)*n : j*n]
		// z^(j+1)
		zExp = zExp.Mul(z)
		exp := multiplyScalarToScalarVector(zExp, twoN)
		// Final elem
		elem := curve.Point.SumOfProducts(hSubvector, exp)
		lastElem = lastElem.Add(elem)
	}
	// h'^(z*y^nm) across the full nm-length generator vector
	zynm := multiplyScalarToScalarVector(z, getknVector(y, n*m, curve))
	hPrimezynm := curve.Point.SumOfProducts(proofHPrime, zynm)
	lastElem = lastElem.Add(hPrimezynm)
	// S^x
	capSx := capS.Mul(x)
	// g^-z --> -z*<1,g>
	onenm := get1nVector(n*m, curve)
	zNeg := z.Neg()
	zinvonen := multiplyScalarToScalarVector(zNeg, onenm)
	zgdotonen := curve.Point.SumOfProducts(proofG, zinvonen)
	// L66 on pg20
	P := capA.Add(capSx).Add(zgdotonen).Add(lastElem)
	// Strip the blinding term: P * h^-mu
	hmu := h.Mul(mu)
	Phmu := P.Sub(hmu)
	return Phmu
}

View File

@ -0,0 +1,102 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestRangeBatchProverHappyPath batch-proves four random 256-bit secrets and
// checks the embedded IPP has log2(4*256) = 10 rounds.
func TestRangeBatchProverHappyPath(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.Random(crand.Reader)
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}

	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.NotNil(t, proof)
	require.Len(t, proof.ipp.capLs, 10)
	require.Len(t, proof.ipp.capRs, 10)
}
// TestGetaLBatched checks that each 256-element segment of the batched bit
// decomposition recombines (via <bits, 2^n>) to the original secret.
func TestGetaLBatched(t *testing.T) {
	curve := curves.ED25519()
	secrets := make([]curves.Scalar, 4)
	for i := range secrets {
		secrets[i] = curve.Scalar.Random(crand.Reader)
	}
	aL, err := getaLBatched(secrets, 256, *curve)
	require.NoError(t, err)

	twoN := get2nVector(256, *curve)
	for i, secret := range secrets {
		segment := aL[i*256 : (i+1)*256]
		recombined, err := innerProduct(segment, twoN)
		require.NoError(t, err)
		require.Zero(t, recombined.Cmp(secret))
	}
}
// TestRangeBatchProverMarshal round-trips a batched range proof through
// MarshalBinary/UnmarshalBinary and compares every proof field.
func TestRangeBatchProverMarshal(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.Random(crand.Reader)
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}

	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)

	decoded := NewRangeProof(curve)
	require.NoError(t, decoded.UnmarshalBinary(proof.MarshalBinary()))

	require.True(t, proof.capA.Equal(decoded.capA))
	require.True(t, proof.capS.Equal(decoded.capS))
	require.True(t, proof.capT1.Equal(decoded.capT1))
	require.True(t, proof.capT2.Equal(decoded.capT2))
	require.Zero(t, proof.taux.Cmp(decoded.taux))
	require.Zero(t, proof.mu.Cmp(decoded.mu))
	require.Zero(t, proof.tHat.Cmp(decoded.tHat))
}

View File

@ -0,0 +1,91 @@
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// VerifyBatched verifies a given batched range proof.
// It takes in a list of commitments to the secret values as capV instead of a single commitment to a single point
// when compared to the unbatched single range proof case.
// The transcript must match the prover's initial transcript state so the
// replayed Fiat-Shamir challenges (y, z, x, w) agree.
func (verifier *RangeVerifier) VerifyBatched(proof *RangeProof, capV []curves.Point, proofGenerators RangeProofGenerators, n int, transcript *merlin.Transcript) (bool, error) {
	// Define nm as the total bits required for secrets, calculated as number of secrets * n
	m := len(capV)
	nm := n * m
	// nm must not exceed the number of generators generated
	if nm > len(verifier.generators.G) {
		return false, errors.New("ipp vector length must be less than maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := verifier.generators.G[0:nm]
	proofH := verifier.generators.H[0:nm]
	// Calc y,z,x from Fiat Shamir heuristic
	y, z, err := calcyzBatched(capV, proof.capA, proof.capS, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	x, err := calcx(proof.capT1, proof.capT2, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := verifier.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		// Was "rangeproof prove" — a copy-paste from the prover; this is the
		// verify path.
		return false, errors.Wrap(err, "rangeproof verify")
	}
	// Calc delta(y,z), redefined for batched case on pg21
	deltayzBatched, err := deltayzBatched(y, z, n, m, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	// Check tHat: L65, pg20
	// See equation 72 on pg21
	tHatIsValid := verifier.checktHatBatched(proof, capV, proofGenerators.g, proofGenerators.h, deltayzBatched, x, z, m)
	if !tHatIsValid {
		return false, errors.New("rangeproof verify tHat is invalid")
	}
	// Verify IPP over the switched generators h' = h^(y^-nm)
	hPrime, err := gethPrime(proofH, y, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	capPhmu := getPhmuBatched(proofG, hPrime, proofGenerators.h, proof.capA, proof.capS, x, y, z, proof.mu, n, m, verifier.curve)
	ippVerified, err := verifier.ippVerifier.VerifyFromRangeProof(proofG, hPrime, capPhmu, proofGenerators.u.Mul(w), proof.tHat, proof.ipp, transcript)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	return ippVerified, nil
}
// L65, pg20.
// checktHatBatched verifies the polynomial-commitment relation
// g^tHat * h^taux == V^(z^2*z^m) * g^delta(y,z) * T1^x * T2^(x^2).
func (verifier *RangeVerifier) checktHatBatched(proof *RangeProof, capV []curves.Point, g, h curves.Point, deltayz, x, z curves.Scalar, m int) bool {
	// g^tHat * h^tau_x
	gtHat := g.Mul(proof.tHat)
	htaux := h.Mul(proof.taux)
	lhs := gtHat.Add(htaux)
	// V^z^2 * g^delta(y,z) * Tau_1^x * Tau_2^x^2
	// g^delta(y,z) * V^(z^2*z^m) * Tau_1^x * Tau_2^x^2
	zm := getknVector(z, m, verifier.curve)
	zsquarezm := multiplyScalarToScalarVector(z.Square(), zm)
	capVzsquaretwom := verifier.curve.Point.SumOfProducts(capV, zsquarezm)
	gdeltayz := g.Mul(deltayz)
	capTau1x := proof.capT1.Mul(x)
	capTau2xsquare := proof.capT2.Mul(x.Square())
	rhs := capVzsquaretwom.Add(gdeltayz).Add(capTau1x).Add(capTau2xsquare)
	// Compare lhs =? rhs
	return lhs.Equal(rhs)
}

View File

@ -0,0 +1,148 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestRangeBatchVerifyHappyPath proves and verifies a batch of four random
// 256-bit secrets end-to-end.
func TestRangeBatchVerifyHappyPath(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.Random(crand.Reader)
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}

	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)

	verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	capV := getcapVBatched(v, gamma, proofGenerators.g, proofGenerators.h)
	verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.True(t, verified)
}
// TestRangeBatchVerifyNotInRange checks that BatchProve refuses secrets that
// do not fit in n bits (random full-width scalars with n = 2).
func TestRangeBatchVerifyNotInRange(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	// First secret fits in 2 bits; the random ones almost surely do not.
	v := []curves.Scalar{curve.Scalar.One()}
	for i := 0; i < 3; i++ {
		v = append(v, curve.Scalar.Random(crand.Reader))
	}
	gamma := make([]curves.Scalar, 4)
	for i := range gamma {
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}

	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	_, err = prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.Error(t, err)
}
// TestRangeBatchVerifyNonRandom proves and verifies a batch where every
// secret is the fixed value one (n = 2), expecting success.
func TestRangeBatchVerifyNonRandom(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.One()
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}

	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)

	verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	capV := getcapVBatched(v, gamma, proofGenerators.g, proofGenerators.h)
	verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.True(t, verified)
}
// TestRangeBatchVerifyInvalid tampers with one of the value commitments and
// checks that batched verification rejects the proof.
func TestRangeBatchVerifyInvalid(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	v := make([]curves.Scalar, 4)
	gamma := make([]curves.Scalar, 4)
	for i := range v {
		v[i] = curve.Scalar.One()
		gamma[i] = curve.Scalar.Random(crand.Reader)
	}

	proofGenerators := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.BatchProve(v, gamma, n, proofGenerators, merlin.NewTranscript("test"))
	require.NoError(t, err)

	verifier, err := NewRangeVerifier(n*4, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)

	// Replace the first commitment with garbage.
	capV := getcapVBatched(v, gamma, proofGenerators.g, proofGenerators.h)
	capV[0] = curve.Point.Random(crand.Reader)

	verified, err := verifier.VerifyBatched(proof, capV, proofGenerators, n, merlin.NewTranscript("test"))
	require.Error(t, err)
	require.False(t, verified)
}

View File

@ -0,0 +1,476 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package bulletproof implements the zero knowledge protocol bulletproofs as defined in https://eprint.iacr.org/2017/1066.pdf
package bulletproof
import (
crand "crypto/rand"
"math/big"
"github.com/gtank/merlin"
"github.com/pkg/errors"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// RangeProver is the struct used to create RangeProofs
// It specifies which curve to use and holds precomputed generators
// See NewRangeProver() for prover initialization.
type RangeProver struct {
	// curve is the elliptic curve all scalars and points belong to.
	curve curves.Curve
	// generators holds the precomputed G and H generator vectors.
	generators *ippGenerators
	// ippProver compacts the l,r vectors into an inner product proof.
	ippProver *InnerProductProver
}
// RangeProof is the struct used to hold a range proof
// capA is a commitment to a_L and a_R using randomness alpha
// capS is a commitment to s_L and s_R using randomness rho
// capTau1,2 are commitments to t1,t2 respectively using randomness tau_1,2
// tHat represents t(X) as defined on page 19
// taux is the blinding factor for tHat
// ipp is the inner product proof used for compacting the transfer of l,r (See 4.2 on pg20).
type RangeProof struct {
	capA, capS, capT1, capT2 curves.Point
	taux, mu, tHat           curves.Scalar
	ipp                      *InnerProductProof
	// curve is retained so UnmarshalBinary knows the point/scalar encodings.
	curve *curves.Curve
}
// RangeProofGenerators bundles the three independent generator points used
// for value commitments (g), blinding factors (h), and the IPP basepoint (u).
type RangeProofGenerators struct {
	g, h, u curves.Point
}
// NewRangeProver initializes a new prover
// It uses the specified domain to generate generators for vectors of at most maxVectorLength
// A prover can be used to construct range proofs for vectors of length less than or equal to maxVectorLength
// A prover is defined by an explicit curve.
func NewRangeProver(maxVectorLength int, rangeDomain, ippDomain []byte, curve curves.Curve) (*RangeProver, error) {
	gens, err := getGeneratorPoints(maxVectorLength, rangeDomain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "range NewRangeProver")
	}
	ipp, err := NewInnerProductProver(maxVectorLength, ippDomain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "range NewRangeProver")
	}
	return &RangeProver{
		curve:      curve,
		generators: gens,
		ippProver:  ipp,
	}, nil
}
// NewRangeProof initializes a new RangeProof for a specified curve
// This should be used in tandem with UnmarshalBinary() to convert a marshaled proof into the struct.
func NewRangeProof(curve *curves.Curve) *RangeProof {
	// Point and scalar fields are deliberately left as their nil zero values;
	// UnmarshalBinary is expected to populate them.
	return &RangeProof{
		ipp:   NewInnerProductProof(curve),
		curve: curve,
	}
}
// Prove uses the range prover to prove that some value v is within the range [0, 2^n]
// It implements the protocol defined on pgs 19,20 in https://eprint.iacr.org/2017/1066.pdf
// v is the value of which to prove the range
// n is the power that specifies the upper bound of the range, ie. 2^n
// gamma is a scalar used for as a blinding factor
// g, h, u are unique points used as generators for the blinding factor
// transcript is a merlin transcript to be used for the fiat shamir heuristic.
// NOTE: the transcript is mutated; prover and verifier must start from
// identically-seeded transcripts for the Fiat-Shamir challenges to agree.
func (prover *RangeProver) Prove(v, gamma curves.Scalar, n int, proofGenerators RangeProofGenerators, transcript *merlin.Transcript) (*RangeProof, error) {
	// n must be less than or equal to the number of generators generated
	if n > len(prover.generators.G) {
		return nil, errors.New("ipp vector length must be less than or equal to maxVectorLength")
	}
	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := prover.generators.G[0:n]
	proofH := prover.generators.H[0:n]

	// Check that v is in range [0, 2^n]
	if bigZero := big.NewInt(0); v.BigInt().Cmp(bigZero) == -1 {
		return nil, errors.New("v is less than 0")
	}
	bigTwo := big.NewInt(2)
	if n < 0 {
		return nil, errors.New("n cannot be less than 0")
	}
	bigN := big.NewInt(int64(n))
	var bigTwoToN big.Int
	bigTwoToN.Exp(bigTwo, bigN, nil)
	// Note: v == 2^n is accepted here, matching the inclusive range in the doc.
	if v.BigInt().Cmp(&bigTwoToN) == 1 {
		return nil, errors.New("v is greater than 2^n")
	}

	// L40 on pg19: bit-decompose v into a_L
	aL, err := getaL(v, n, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	onen := get1nVector(n, prover.curve)
	// L41 on pg19: a_R = a_L - 1^n
	aR, err := subtractPairwiseScalarVectors(aL, onen)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}

	alpha := prover.curve.Scalar.Random(crand.Reader)
	// Calc A (L44, pg19): A = h^alpha * g^aL * h^aR
	halpha := proofGenerators.h.Mul(alpha)
	gaL := prover.curve.Point.SumOfProducts(proofG, aL)
	haR := prover.curve.Point.SumOfProducts(proofH, aR)
	capA := halpha.Add(gaL).Add(haR)

	// L45, 46, pg19: random blinding vectors for the polynomial commitment
	sL := getBlindingVector(n, prover.curve)
	sR := getBlindingVector(n, prover.curve)
	rho := prover.curve.Scalar.Random(crand.Reader)
	// Calc S (L47, pg19)
	hrho := proofGenerators.h.Mul(rho)
	gsL := prover.curve.Point.SumOfProducts(proofG, sL)
	hsR := prover.curve.Point.SumOfProducts(proofH, sR)
	capS := hrho.Add(gsL).Add(hsR)

	// Fiat Shamir for y,z (L49, pg19)
	capV := getcapV(v, gamma, proofGenerators.g, proofGenerators.h)
	y, z, err := calcyz(capV, capA, capS, transcript, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}

	// Calc t_1, t_2
	// See the l(X), r(X), t(X) equations on pg 19
	// Use l(X)'s and r(X)'s constant and linear terms to derive t_1 and t_2
	// (a_l - z*1^n)
	zonen := multiplyScalarToScalarVector(z, onen)
	constantTerml, err := subtractPairwiseScalarVectors(aL, zonen)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	linearTerml := sL
	// z^2 * 2^N
	twoN := get2nVector(n, prover.curve)
	zSquareTwon := multiplyScalarToScalarVector(z.Square(), twoN)
	// a_r + z*1^n
	aRPluszonen, err := addPairwiseScalarVectors(aR, zonen)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	yn := getknVector(y, n, prover.curve)
	hadamard, err := multiplyPairwiseScalarVectors(yn, aRPluszonen)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	constantTermr, err := addPairwiseScalarVectors(hadamard, zSquareTwon)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	linearTermr, err := multiplyPairwiseScalarVectors(yn, sR)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// t_1 (as the linear coefficient) is the sum of the dot products of l(X)'s linear term dot r(X)'s constant term
	// and r(X)'s linear term dot l(X)'s constant term
	t1FirstTerm, err := innerProduct(linearTerml, constantTermr)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t1SecondTerm, err := innerProduct(linearTermr, constantTerml)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t1 := t1FirstTerm.Add(t1SecondTerm)
	// t_2 (as the quadratic coefficient) is the dot product of l(X)'s and r(X)'s linear terms
	t2, err := innerProduct(linearTerml, linearTermr)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}

	// L52, pg20: blinding factors for T_1, T_2
	tau1 := prover.curve.Scalar.Random(crand.Reader)
	tau2 := prover.curve.Scalar.Random(crand.Reader)
	// T_1, T_2 (L53, pg20)
	capT1 := proofGenerators.g.Mul(t1).Add(proofGenerators.h.Mul(tau1))
	capT2 := proofGenerators.g.Mul(t2).Add(proofGenerators.h.Mul(tau2))

	// Fiat shamir for x (L55, pg20)
	x, err := calcx(capT1, capT2, transcript, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}

	// Calc l (L58, pg20)
	// Instead of using the expression in the line, evaluate l() at x
	sLx := multiplyScalarToScalarVector(x, linearTerml)
	l, err := addPairwiseScalarVectors(constantTerml, sLx)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc r (L59, pg20)
	// Instead of using the expression in the line, evaluate r() at x
	ynsRx := multiplyScalarToScalarVector(x, linearTermr)
	r, err := addPairwiseScalarVectors(constantTermr, ynsRx)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Calc t hat (L60, pg20)
	// For efficiency, instead of calculating the dot product, evaluate t() at x
	deltayz, err := deltayz(y, z, n, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	t0 := v.Mul(z.Square()).Add(deltayz)
	tLinear := t1.Mul(x)
	tQuadratic := t2.Mul(x.Square())
	tHat := t0.Add(tLinear).Add(tQuadratic)
	// Calc tau_x (L61, pg20)
	tau2xsquare := tau2.Mul(x.Square())
	tau1x := tau1.Mul(x)
	zsquaregamma := z.Square().Mul(gamma)
	taux := tau2xsquare.Add(tau1x).Add(zsquaregamma)
	// Calc mu (L62, pg20)
	mu := alpha.Add(rho.Mul(x))

	// Calc IPP (See section 4.2)
	hPrime, err := gethPrime(proofH, y, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	capPhmu, err := getPhmu(proofG, hPrime, proofGenerators.h, capA, capS, x, y, z, mu, n, prover.curve)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	// Draw the IPP basepoint exponent w from the same transcript so the
	// verifier derives an identical u^w.
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := prover.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	ipp, err := prover.ippProver.rangeToIPP(proofG, hPrime, l, r, tHat, capPhmu, proofGenerators.u.Mul(w), transcript)
	if err != nil {
		return nil, errors.Wrap(err, "rangeproof prove")
	}
	out := &RangeProof{
		capA:  capA,
		capS:  capS,
		capT1: capT1,
		capT2: capT2,
		taux:  taux,
		mu:    mu,
		tHat:  tHat,
		ipp:   ipp,
		curve: &prover.curve,
	}
	return out, nil
}
// MarshalBinary serializes a range proof into a flat byte slice:
// the four commitment points (compressed), the three scalars, then the
// marshaled inner product proof.
func (proof *RangeProof) MarshalBinary() []byte {
	var out []byte
	for _, p := range []curves.Point{proof.capA, proof.capS, proof.capT1, proof.capT2} {
		out = append(out, p.ToAffineCompressed()...)
	}
	for _, s := range []curves.Scalar{proof.taux, proof.mu, proof.tHat} {
		out = append(out, s.Bytes()...)
	}
	return append(out, proof.ipp.MarshalBinary()...)
}
// UnmarshalBinary takes bytes of a marshaled proof and writes them into a range proof
// The range proof used should be from the output of NewRangeProof().
// Returns an error (instead of panicking) when data is truncated or malformed.
func (proof *RangeProof) UnmarshalBinary(data []byte) error {
	scalarLen := len(proof.curve.NewScalar().Bytes())
	pointLen := len(proof.curve.NewGeneratorPoint().ToAffineCompressed())
	// Guard against truncated input: four points and three scalars are read at
	// fixed offsets below; previously a short slice caused an index-out-of-range panic.
	if len(data) < 4*pointLen+3*scalarLen {
		return errors.New("rangeProof UnmarshalBinary insufficient data")
	}
	ptr := 0
	// Get points
	capA, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary FromAffineCompressed")
	}
	proof.capA = capA
	ptr += pointLen
	capS, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary FromAffineCompressed")
	}
	proof.capS = capS
	ptr += pointLen
	capT1, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary FromAffineCompressed")
	}
	proof.capT1 = capT1
	ptr += pointLen
	capT2, err := proof.curve.Point.FromAffineCompressed(data[ptr : ptr+pointLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary FromAffineCompressed")
	}
	proof.capT2 = capT2
	ptr += pointLen
	// Get scalars
	taux, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary SetBytes")
	}
	proof.taux = taux
	ptr += scalarLen
	mu, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary SetBytes")
	}
	proof.mu = mu
	ptr += scalarLen
	tHat, err := proof.curve.NewScalar().SetBytes(data[ptr : ptr+scalarLen])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary SetBytes")
	}
	proof.tHat = tHat
	ptr += scalarLen
	// Get IPP (performs its own length validation on the remainder)
	err = proof.ipp.UnmarshalBinary(data[ptr:])
	if err != nil {
		return errors.New("rangeProof UnmarshalBinary")
	}
	return nil
}
// checkRange validates that scalar v does not exceed 2^n.
// NOTE(review): despite earlier wording claiming the range [0, 2^n - 1] with a
// negative check, the code below only rejects v > 2^n (v == 2^n is accepted)
// and performs no explicit v < 0 check — presumably scalar BigInt encodings
// are always non-negative here; confirm against the Scalar implementation.
// Returns nil when the value passes the check.
func checkRange(v curves.Scalar, n int) error {
	bigOne := big.NewInt(1)
	if n < 0 {
		return errors.New("n cannot be less than 0")
	}
	var bigTwoToN big.Int
	// 1 << n == 2^n
	bigTwoToN.Lsh(bigOne, uint(n))
	if v.BigInt().Cmp(&bigTwoToN) == 1 {
		return errors.New("v is greater than 2^n")
	}
	return nil
}
// getBlindingVector samples `length` uniformly random scalars used as
// blinding factors for commitments.
func getBlindingVector(length int, curve curves.Curve) []curves.Scalar {
	out := make([]curves.Scalar, length)
	for i := range out {
		out[i] = curve.Scalar.Random(crand.Reader)
	}
	return out
}
// getcapV returns the Pedersen commitment to v with blinding factor gamma:
// V = h^gamma * g^v.
func getcapV(v, gamma curves.Scalar, g, h curves.Point) curves.Point {
	blind := h.Mul(gamma)
	value := g.Mul(v)
	return blind.Add(value)
}
// getaL obtains the bit vector representation of v
// See the a_L definition towards the bottom of pg 17 of https://eprint.iacr.org/2017/1066.pdf
// Bits are read least-significant-first from the scalar's byte encoding
// (TestGetaL confirms <aL, 2^n> recombines to v).
func getaL(v curves.Scalar, n int, curve curves.Curve) ([]curves.Scalar, error) {
	vBytes := v.Bytes()
	// Guard against reading past the scalar encoding; previously an oversized
	// n caused an index-out-of-range panic instead of an error.
	if n > len(vBytes)*8 {
		return nil, errors.New("getaL: n exceeds the bit length of v")
	}
	zero := curve.Scalar.Zero()
	one := curve.Scalar.One()
	// Note: the redundant zero-fill loop was removed — every element is
	// assigned exactly once below.
	aL := make([]curves.Scalar, n)
	var err error
	for i := 0; i < n; i++ {
		ithBit := vBytes[i>>3] >> (i & 0x07) & 0x01
		// Constant-time select keeps the bit pattern out of timing side channels.
		aL[i], err = cmoveScalar(zero, one, int(ithBit), curve)
		if err != nil {
			return nil, errors.Wrap(err, "getaL")
		}
	}
	return aL, nil
}
// cmoveScalar provides a constant time operation that returns x if which is 0 and returns y if which is 1.
func cmoveScalar(x, y curves.Scalar, which int, curve curves.Curve) (curves.Scalar, error) {
	if which != 0 && which != 1 {
		return nil, errors.New("cmoveScalar which must be 0 or 1")
	}
	// mask is 0x00 when which == 0 and 0xFF when which == 1.
	mask := -byte(which)
	picked := x.Bytes()
	yBytes := y.Bytes()
	for i := range picked {
		// XOR-select: leaves picked[i] unchanged under a zero mask,
		// replaces it with yBytes[i] under a full mask.
		picked[i] ^= (picked[i] ^ yBytes[i]) & mask
	}
	out, err := curve.NewScalar().SetBytes(picked)
	if err != nil {
		return nil, errors.Wrap(err, "cmoveScalar SetBytes")
	}
	return out, nil
}
// calcyz uses a merlin transcript for Fiat Shamir
// It takes the current state of the transcript and appends the newly calculated capA and capS values
// Two new scalars are then read from the transcript
// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf
func calcyz(capV, capA, capS curves.Point, transcript *merlin.Transcript, curve curves.Curve) (curves.Scalar, curves.Scalar, error) {
	// Bind the commitments into the transcript state.
	transcript.AppendMessage([]byte("addV"), capV.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addcapA"), capA.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addcapS"), capS.ToAffineUncompressed())
	// Draw a 64-byte wide reduction for each challenge scalar.
	challenge := func(label string) (curves.Scalar, error) {
		wide := transcript.ExtractBytes([]byte(label), 64)
		return curve.NewScalar().SetBytesWide(wide)
	}
	y, err := challenge("gety")
	if err != nil {
		return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide")
	}
	z, err := challenge("getz")
	if err != nil {
		return nil, nil, errors.Wrap(err, "calcyz NewScalar SetBytesWide")
	}
	return y, z, nil
}
// calcx uses a merlin transcript for Fiat Shamir
// It takes the current state of the transcript and appends the newly calculated capT1 and capT2 values
// A new scalar is then read from the transcript
// See section 4.4 pg22 of https://eprint.iacr.org/2017/1066.pdf
func calcx(capT1, capT2 curves.Point, transcript *merlin.Transcript, curve curves.Curve) (curves.Scalar, error) {
	// Bind the T1/T2 commitments into the transcript state.
	transcript.AppendMessage([]byte("addcapT1"), capT1.ToAffineUncompressed())
	transcript.AppendMessage([]byte("addcapT2"), capT2.ToAffineUncompressed())
	// Wide reduction of 64 transcript bytes into the challenge scalar.
	wide := transcript.ExtractBytes([]byte("getx"), 64)
	x, err := curve.NewScalar().SetBytesWide(wide)
	if err != nil {
		return nil, errors.Wrap(err, "calcx NewScalar SetBytesWide")
	}
	return x, nil
}

View File

@ -0,0 +1,86 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestRangeProverHappyPath proves a random 256-bit value and sanity-checks the
// inner product proof round count (log2(256) = 8).
func TestRangeProverHappyPath(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)
	gens := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.Prove(v, gamma, n, gens, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.NotNil(t, proof)
	require.Equal(t, 8, len(proof.ipp.capLs))
	require.Equal(t, 8, len(proof.ipp.capRs))
}
// TestGetaL checks that recombining the bit vector against powers of two
// reproduces the original scalar.
func TestGetaL(t *testing.T) {
	curve := curves.ED25519()
	v := curve.Scalar.Random(crand.Reader)
	bits, err := getaL(v, 256, *curve)
	require.NoError(t, err)
	recombined, err := innerProduct(bits, get2nVector(256, *curve))
	require.NoError(t, err)
	require.Zero(t, recombined.Cmp(v))
}
// TestCmove verifies the constant-time select returns the second operand
// when the selector is 1.
func TestCmove(t *testing.T) {
	curve := curves.ED25519()
	two := curve.Scalar.One().Double()
	four := two.Double()
	picked, err := cmoveScalar(two, four, 1, *curve)
	require.NoError(t, err)
	require.Zero(t, picked.Cmp(four))
}
// TestRangeProverMarshal round-trips a proof through the binary encoding and
// compares every field against the original.
func TestRangeProverMarshal(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)
	gens := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.Prove(v, gamma, n, gens, merlin.NewTranscript("test"))
	require.NoError(t, err)
	decoded := NewRangeProof(curve)
	require.NoError(t, decoded.UnmarshalBinary(proof.MarshalBinary()))
	require.True(t, proof.capA.Equal(decoded.capA))
	require.True(t, proof.capS.Equal(decoded.capS))
	require.True(t, proof.capT1.Equal(decoded.capT1))
	require.True(t, proof.capT2.Equal(decoded.capT2))
	require.Zero(t, proof.taux.Cmp(decoded.taux))
	require.Zero(t, proof.mu.Cmp(decoded.mu))
	require.Zero(t, proof.tHat.Cmp(decoded.tHat))
}

View File

@ -0,0 +1,187 @@
package bulletproof
import (
"github.com/gtank/merlin"
"github.com/pkg/errors"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// RangeVerifier is the struct used to verify RangeProofs
// It specifies which curve to use and holds precomputed generators
// See NewRangeVerifier() for verifier initialization.
type RangeVerifier struct {
	// curve is the elliptic curve all scalars and points belong to.
	curve curves.Curve
	// generators must be derived from the same domain as the prover's.
	generators *ippGenerators
	// ippVerifier checks the compacted inner product proof.
	ippVerifier *InnerProductVerifier
}
// NewRangeVerifier initializes a new verifier
// It uses the specified domain to generate generators for vectors of at most maxVectorLength
// A verifier can be used to verify range proofs for vectors of length less than or equal to maxVectorLength
// A verifier is defined by an explicit curve.
func NewRangeVerifier(maxVectorLength int, rangeDomain, ippDomain []byte, curve curves.Curve) (*RangeVerifier, error) {
	generators, err := getGeneratorPoints(maxVectorLength, rangeDomain, curve)
	if err != nil {
		// Fixed copy-pasted wrap message that previously said "NewRangeProver".
		return nil, errors.Wrap(err, "range NewRangeVerifier")
	}
	ippVerifier, err := NewInnerProductVerifier(maxVectorLength, ippDomain, curve)
	if err != nil {
		return nil, errors.Wrap(err, "range NewRangeVerifier")
	}
	return &RangeVerifier{curve: curve, generators: generators, ippVerifier: ippVerifier}, nil
}
// Verify verifies the given range proof inputs
// It implements the checking of L65 on pg 20
// It also verifies the dot product of <l,r> using the inner product proof
// capV is a commitment to v using blinding factor gamma
// n is the power that specifies the upper bound of the range, ie. 2^n
// g, h, u are unique points used as generators for the blinding factor
// transcript is a merlin transcript to be used for the fiat shamir heuristic.
// The transcript must start in the same state the prover's did.
func (verifier *RangeVerifier) Verify(proof *RangeProof, capV curves.Point, proofGenerators RangeProofGenerators, n int, transcript *merlin.Transcript) (bool, error) {
	// Length of vectors must be less than or equal to the number of generators
	// generated (message fixed to match the `n >` check and the prover's wording).
	if n > len(verifier.generators.G) {
		return false, errors.New("ipp vector length must be less than or equal to maxVectorLength")
	}

	// In case where len(a) is less than number of generators precomputed by prover, trim to length
	proofG := verifier.generators.G[0:n]
	proofH := verifier.generators.H[0:n]

	// Calc y,z,x from Fiat Shamir heuristic
	y, z, err := calcyz(capV, proof.capA, proof.capS, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	x, err := calcx(proof.capT1, proof.capT2, transcript, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	wBytes := transcript.ExtractBytes([]byte("getw"), 64)
	w, err := verifier.curve.NewScalar().SetBytesWide(wBytes)
	if err != nil {
		// Fixed copy-pasted wrap message that previously said "rangeproof prove".
		return false, errors.Wrap(err, "rangeproof verify")
	}

	// Calc delta(y,z)
	deltayz, err := deltayz(y, z, n, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}

	// Check tHat: L65, pg20
	tHatIsValid := verifier.checktHat(proof, capV, proofGenerators.g, proofGenerators.h, deltayz, x, z)
	if !tHatIsValid {
		return false, errors.New("rangeproof verify tHat is invalid")
	}

	// Verify IPP
	hPrime, err := gethPrime(proofH, y, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	capPhmu, err := getPhmu(proofG, hPrime, proofGenerators.h, proof.capA, proof.capS, x, y, z, proof.mu, n, verifier.curve)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	ippVerified, err := verifier.ippVerifier.VerifyFromRangeProof(proofG, hPrime, capPhmu, proofGenerators.u.Mul(w), proof.tHat, proof.ipp, transcript)
	if err != nil {
		return false, errors.Wrap(err, "rangeproof verify")
	}
	return ippVerified, nil
}
// checktHat performs the commitment consistency check of L65, pg20:
// g^tHat * h^taux == V^(z^2) * g^delta(y,z) * T1^x * T2^(x^2).
func (*RangeVerifier) checktHat(proof *RangeProof, capV, g, h curves.Point, deltayz, x, z curves.Scalar) bool {
	// Left side: g^tHat * h^tau_x
	lhs := g.Mul(proof.tHat).Add(h.Mul(proof.taux))
	// Right side: V^z^2 * g^delta(y,z) * T_1^x * T_2^x^2
	rhs := capV.Mul(z.Square()).
		Add(g.Mul(deltayz)).
		Add(proof.capT1.Mul(x)).
		Add(proof.capT2.Mul(x.Square()))
	return lhs.Equal(rhs)
}
// gethPrime calculates new h prime generators as defined in L64 on pg20.
// Each h[i] is raised to y^-i so the verifier can reuse the prover's H basis.
func gethPrime(h []curves.Point, y curves.Scalar, curve curves.Curve) ([]curves.Point, error) {
	yInv, err := y.Invert()
	// Check the inversion error before using yInv; previously yInv was passed
	// to getknVector first, so a failed Invert could be dereferenced.
	if err != nil {
		return nil, errors.Wrap(err, "gethPrime")
	}
	yInvn := getknVector(yInv, len(h), curve)
	hPrime := make([]curves.Point, len(h))
	for i, hElem := range h {
		hPrime[i] = hElem.Mul(yInvn[i])
	}
	return hPrime, nil
}
// getPhmu obtains the point P used for IPP verification (L67 on pg20).
// Note P on L66 includes the blinding factor h^mu; this method removes it.
func getPhmu(proofG, proofHPrime []curves.Point, h, capA, capS curves.Point, x, y, z, mu curves.Scalar, n int, curve curves.Curve) (curves.Point, error) {
	// Exponents for h': z*y^n + z^2*2^n
	zyn := multiplyScalarToScalarVector(z, getknVector(y, n, curve))
	zzTwon := multiplyScalarToScalarVector(z.Square(), get2nVector(n, curve))
	hExp, err := addPairwiseScalarVectors(zyn, zzTwon)
	if err != nil {
		return nil, errors.Wrap(err, "getPhmu")
	}
	hPrimeTerm := curve.Point.SumOfProducts(proofHPrime, hExp)
	// S^x
	capSx := capS.Mul(x)
	// g^-z expressed as -z*<1^n, g>
	negZOnen := multiplyScalarToScalarVector(z.Neg(), get1nVector(n, curve))
	gTerm := curve.Point.SumOfProducts(proofG, negZOnen)
	// P from L66 on pg20, then strip the blinding factor h^mu
	capP := capA.Add(capSx).Add(gTerm).Add(hPrimeTerm)
	return capP.Sub(h.Mul(mu)), nil
}
// deltayz computes delta(y,z) = (z - z^2)*<1^n, y^n> - z^3*<1^n, 2^n>,
// see (39) on pg18.
func deltayz(y, z curves.Scalar, n int, curve curves.Curve) (curves.Scalar, error) {
	onen := get1nVector(n, curve)
	// <1^n, y^n>
	sumYn, err := innerProduct(onen, getknVector(y, n, curve))
	if err != nil {
		return nil, errors.Wrap(err, "deltayz")
	}
	// <1^n, 2^n>
	sumTwon, err := innerProduct(onen, get2nVector(n, curve))
	if err != nil {
		return nil, errors.Wrap(err, "deltayz")
	}
	// (z - z^2)*<1^n, y^n> - z^3*<1^n, 2^n>
	first := z.Sub(z.Square()).Mul(sumYn)
	second := z.Cube().Mul(sumTwon)
	return first.Sub(second), nil
}

View File

@ -0,0 +1,87 @@
package bulletproof
import (
crand "crypto/rand"
"testing"
"github.com/gtank/merlin"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves"
)
// TestRangeVerifyHappyPath proves and verifies a random 256-bit value
// end-to-end with matching transcripts.
func TestRangeVerifyHappyPath(t *testing.T) {
	curve := curves.ED25519()
	n := 256
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)
	gens := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.Prove(v, gamma, n, gens, merlin.NewTranscript("test"))
	require.NoError(t, err)
	verifier, err := NewRangeVerifier(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	capV := getcapV(v, gamma, gens.g, gens.h)
	verified, err := verifier.Verify(proof, capV, gens, n, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.True(t, verified)
}
// TestRangeVerifyNotInRange ensures proving fails when v lies outside [0, 2^n].
func TestRangeVerifyNotInRange(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	// A random full-width scalar is overwhelmingly likely to exceed 2^2.
	v := curve.Scalar.Random(crand.Reader)
	gamma := curve.Scalar.Random(crand.Reader)
	gens := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	_, err = prover.Prove(v, gamma, n, gens, merlin.NewTranscript("test"))
	require.Error(t, err)
}
// TestRangeVerifyNonRandom proves and verifies the fixed value 1 over a
// tiny range (n = 2).
func TestRangeVerifyNonRandom(t *testing.T) {
	curve := curves.ED25519()
	n := 2
	prover, err := NewRangeProver(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	v := curve.Scalar.One()
	gamma := curve.Scalar.Random(crand.Reader)
	gens := RangeProofGenerators{
		g: curve.Point.Random(crand.Reader),
		h: curve.Point.Random(crand.Reader),
		u: curve.Point.Random(crand.Reader),
	}
	proof, err := prover.Prove(v, gamma, n, gens, merlin.NewTranscript("test"))
	require.NoError(t, err)
	verifier, err := NewRangeVerifier(n, []byte("rangeDomain"), []byte("ippDomain"), *curve)
	require.NoError(t, err)
	capV := getcapV(v, gamma, gens.g, gens.h)
	verified, err := verifier.Verify(proof, capV, gens, n, merlin.NewTranscript("test"))
	require.NoError(t, err)
	require.True(t, verified)
}

View File

@ -0,0 +1,5 @@
# Core Package
The core package contains a set of cryptographic primitives, including various
elliptic curves, hashes, and commitment schemes. These primitives are used
internally and can also be used independently in external projects.

View File

@ -0,0 +1,115 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package core
import (
"crypto/hmac"
crand "crypto/rand"
"crypto/sha256"
"crypto/subtle"
"encoding/json"
"fmt"
"hash"
)
// Size is the byte length of random nonces and hash outputs, determined by
// our hash function (SHA-256).
const Size = sha256.Size
type (
	// Commitment to a given message which can be later revealed.
	// This is sent to and held by a verifier until the corresponding
	// witness is provided.
	Commitment []byte

	// Witness is sent to and opened by the verifier. This proves that
	// committed message hasn't been altered by later information.
	Witness struct {
		Msg []byte
		// r is the random HMAC key (nonce); unexported so callers cannot
		// alter it after creation.
		r [Size]byte
	}

	// witnessJSON is used for un/marshaling; it mirrors Witness with the
	// nonce exported so encoding/json can access it.
	witnessJSON struct {
		Msg []byte
		R   [Size]byte
	}
)
// MarshalJSON encodes a Witness in JSON, routing through the witnessJSON
// mirror so the unexported nonce is serialized.
func (w Witness) MarshalJSON() ([]byte, error) {
	proxy := witnessJSON{Msg: w.Msg, R: w.r}
	return json.Marshal(proxy)
}
// UnmarshalJSON decodes JSON into a Witness struct via the witnessJSON mirror.
func (w *Witness) UnmarshalJSON(data []byte) error {
	var proxy witnessJSON
	if err := json.Unmarshal(data, &proxy); err != nil {
		return err
	}
	w.Msg = proxy.Msg
	w.r = proxy.R
	return nil
}
// Commit to a given message. Uses SHA256 as the hash function.
func Commit(msg []byte) (Commitment, *Witness, error) {
// Initialize our decommitment
d := Witness{msg, [Size]byte{}}
// Generate a random nonce of the required length
n, err := crand.Read(d.r[:])
// Ensure no errors retrieving nonce
if err != nil {
return nil, nil, err
}
// Ensure we read all the bytes expected
if n != Size {
return nil, nil, fmt.Errorf("failed to read %v bytes from crypto/rand: received %v bytes", Size, n)
}
// Compute the commitment: HMAC(Sha2, msg, key)
c, err := ComputeHMAC(sha256.New, msg, d.r[:])
if err != nil {
return nil, nil, err
}
return c, &d, nil
}
// Open a commitment and return true if the commitment/decommitment pair are valid.
// reference: spec.§2.4: Commitment Scheme
func Open(c Commitment, d Witness) (bool, error) {
	// Ensure commitment is well-formed.
	if len(c) != Size {
		return false, fmt.Errorf("invalid commitment, wrong length. %v != %v", len(c), Size)
	}
	// Re-compute the commitment: HMAC(Sha2, msg, key)
	recomputed, err := ComputeHMAC(sha256.New, d.Msg, d.r[:])
	if err != nil {
		return false, err
	}
	// Constant-time comparison avoids leaking match position through timing.
	match := subtle.ConstantTimeCompare(recomputed, c) == 1
	return match, nil
}
// ComputeHMAC computes HMAC(hash_fn, msg, key)
// Takes in a hash function to use for HMAC
func ComputeHMAC(f func() hash.Hash, msg []byte, k []byte) ([]byte, error) {
if f == nil {
return nil, fmt.Errorf("hash function cannot be nil")
}
mac := hmac.New(f, k)
w, err := mac.Write(msg)
if w != len(msg) {
return nil, fmt.Errorf("bytes written to hash doesn't match expected: %v != %v", w, len(msg))
} else if err != nil {
return nil, err
}
return mac.Sum(nil), nil
}

View File

@ -0,0 +1,374 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package core
import (
"bytes"
"encoding/json"
"testing"
"github.com/stretchr/testify/require"
)
// An entry into our test table
type entry struct {
	// Input
	msg []byte
	// Result (actual, not expected) — populated by init() below.
	commit   Commitment
	decommit *Witness
	err      error
}
// Test inputs and placeholders for results that will be filled in
// during init(). Cases cover short, long (> SHA256 block size), binary,
// empty, and nil messages.
var testResults = []entry{
	{[]byte("This is a test message"), nil, nil, nil},
	{[]byte("short msg"), nil, nil, nil},
	{[]byte("This input field is intentionally longer than the SHA256 block size to ensure that the entire message is processed"),
		nil, nil, nil},
	{[]byte{0xFB, 0x1A, 0x18, 0x47, 0x39, 0x3C, 0x9F, 0x45, 0x5F, 0x29, 0x4C, 0x51, 0x42, 0x30, 0xA6, 0xB9},
		nil, nil, nil},
	// msg = \epsilon (empty string)
	{[]byte{}, nil, nil, nil},
	// msg == nil
	{nil, nil, nil, nil},
}
// Run our inputs through Commit once and record the outputs for every
// test below to inspect.
func init() {
	for i := range testResults {
		entry := &testResults[i]
		entry.commit, entry.decommit, entry.err = Commit(entry.msg)
	}
}
// Computing commitments should never produce errors.
func TestCommitWithoutErrors(t *testing.T) {
	for _, e := range testResults {
		if e.err != nil {
			t.Errorf("received Commit(%v): %v", e.msg, e.err)
		}
	}
}
// Commitments should be 256b == 32B in length.
func TestCommitmentsAreExpectedLength(t *testing.T) {
	const expLen = 256 / 8
	for _, e := range testResults {
		if got := len(e.commit); got != expLen {
			t.Errorf("commitment is not expected length: %v != %v", got, expLen)
		}
	}
}
// Decommit cannot be nil.
func TestCommmitProducesDecommit(t *testing.T) {
	for _, e := range testResults {
		if e.decommit == nil {
			t.Errorf("decommit cannot be nil: Commit(%v)", e.msg)
		}
	}
}
// Decommit value should carry the same message that was committed.
func TestCommmitProducesDecommitWithSameMessage(t *testing.T) {
	for _, e := range testResults {
		if !bytes.Equal(e.msg, e.decommit.Msg) {
			t.Errorf("decommit.msg != msg: %v != %v", e.msg, e.decommit.Msg)
		}
	}
}
// Commitments should be unique across all distinct table inputs.
func TestCommmitProducesDistinctCommitments(t *testing.T) {
	seen := make(map[[Size]byte]bool)
	// Check the pre-computed commitments for uniqueness
	for _, entry := range testResults {
		// Slices cannot be used as hash keys, so we need to copy into
		// an array. Oh, go-lang.
		var cee [Size]byte
		copy(cee[:], entry.commit)
		// Ensure each commit is unique
		if seen[cee] {
			t.Errorf("duplicate commit found: %v", cee)
		}
		seen[cee] = true
	}
}
// Commitments should be unique even for the same message since the nonce is
// randomly selected
func TestCommmitDistinctCommitments(t *testing.T) {
	seen := make(map[[Size]byte]bool)
	msg := []byte("black lives matter")
	const iterations = 1000
	// Commit to the same message many times; every commitment must differ
	for i := 0; i < iterations; i++ {
		// Compute a commitment
		c, _, err := Commit(msg)
		if err != nil {
			t.Error(err)
		}
		// Slices cannot be used as hash keys, so copy into an array
		var cee [Size]byte
		copy(cee[:], []byte(c))
		// Ensure each commit is unique
		if seen[cee] {
			t.Errorf("duplicate commit found: %v", cee)
		}
		seen[cee] = true
	}
}
// Nonces must be 256 bits = 32 bytes
// (the original comment's "64B" was wrong: 256/8 = 32).
func TestCommmitNonceIsExpectedLength(t *testing.T) {
	const expLen = 256 / 8 // 32 bytes
	// Check the pre-computed nonces
	for _, entry := range testResults {
		if len(entry.decommit.r) != expLen {
			t.Errorf("nonce is not expected length: %v != %v", len(entry.decommit.r), expLen)
		}
	}
}
// Randomly selected nonces will be unique with overwhelming probability
func TestCommmitProducesDistinctNonces(t *testing.T) {
	seen := make(map[[Size]byte]bool)
	msg := []byte("black lives matter")
	const iterations = 1000
	// Commit repeatedly to the same message; every nonce must differ
	for i := 0; i < iterations; i++ {
		// Compute a commitment
		_, dee, err := Commit(msg)
		if err != nil {
			t.Error(err)
		}
		// Ensure each nonce is unique
		if seen[dee.r] {
			t.Errorf("duplicate nonce found: %v", dee.r)
		}
		seen[dee.r] = true
	}
}
// Every commitment produced by Commit must open successfully with its
// own witness: no error, and verification returns true.
func TestOpenOnValidCommitments(t *testing.T) {
	for _, e := range testResults {
		ok, err := Open(e.commit, *e.decommit)
		if err != nil {
			t.Error(err)
		}
		if !ok {
			t.Errorf("commitment failed to open: %v", e.msg)
		}
	}
}
// Opening with a tampered nonce must fail (cleanly, without error).
func TestOpenOnModifiedNonce(t *testing.T) {
	for _, entry := range testResults {
		dʹ := copyWitness(entry.decommit)
		// Modify the nonce (flip one bit of the first byte)
		dʹ.r[0] ^= 0x40
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// Zeroing a prefix of the nonce must cause verification to fail
// (cleanly, without error).
func TestOpenOnZeroPrefixNonce(t *testing.T) {
	for _, e := range testResults {
		w := copyWitness(e.decommit)
		// Zero the first 11 bytes of the nonce
		for i := 0; i <= 10; i++ {
			w.r[i] = 0x00
		}
		// Open and check for failure
		ok, err := Open(e.commit, *w)
		assertFailedOpen(t, ok, err)
	}
}
// copyWitness returns a deep copy of d: the message slice and the nonce
// array are both duplicated so mutations cannot reach the original.
func copyWitness(d *Witness) *Witness {
	dup := &Witness{Msg: make([]byte, len(d.Msg))}
	copy(dup.Msg, d.Msg)
	copy(dup.r[:], d.r[:])
	return dup
}
// Asserts that err == nil and ok == false: Open completed without error,
// but the commitment correctly failed to verify.
// (The original header comment said "err != nil", which contradicted the code.)
func assertFailedOpen(t *testing.T, ok bool, err error) {
	// There should be no error
	if err != nil {
		t.Error(err)
	}
	// But the commitments should fail
	if ok {
		t.Error("commitment was verified but was expected to fail")
	}
}
// An unrelated message should fail on open
func TestOpenOnNewMessage(t *testing.T) {
	for _, entry := range testResults {
		dʹ := copyWitness(entry.decommit)
		// Use a distinct message
		dʹ.Msg = []byte("no one expects the spanish inquisition")
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// An appended message should fail on open.
// FIX: the original body replaced the message outright, which made this test
// an exact duplicate of TestOpenOnNewMessage and left appends untested.
// Now the suffix is actually appended to the committed message.
func TestOpenOnAppendedMessage(t *testing.T) {
	for _, entry := range testResults {
		dʹ := copyWitness(entry.decommit)
		// Append extra bytes to the message
		dʹ.Msg = append(dʹ.Msg, []byte("no one expects the spanish inquisition")...)
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// A modified message should fail on open
func TestOpenOnModifiedMessage(t *testing.T) {
	for _, entry := range testResults {
		// Skip the empty string message for this test case
		// (index 1 below needs at least two bytes; every non-empty table
		// entry is long enough)
		if len(entry.msg) == 0 {
			continue
		}
		// Modify the message _in situ_
		dʹ := copyWitness(entry.decommit)
		dʹ.Msg[1] ^= 0x99
		// Open and check for failure
		ok, err := Open(entry.commit, *dʹ)
		assertFailedOpen(t, ok, err)
	}
}
// A tampered commitment must fail to open (cleanly, without error).
func TestOpenOnModifiedCommitment(t *testing.T) {
	for _, e := range testResults {
		// Clone the commitment, then flip some bits
		mutated := make([]byte, Size)
		copy(mutated, e.commit)
		mutated[6] ^= 0x33
		// Open and check for failure
		ok, err := Open(mutated, *e.decommit)
		assertFailedOpen(t, ok, err)
	}
}
// An empty (zero-value) decommit should fail to open
func TestOpenOnDefaultDecommitObject(t *testing.T) {
	for _, entry := range testResults {
		// Open and check for failure
		ok, err := Open(entry.commit, Witness{})
		assertFailedOpen(t, ok, err)
	}
}
// A nil commitment should return an error (not merely fail verification)
func TestOpenOnNilCommitment(t *testing.T) {
	_, err := Open(nil, Witness{})
	assertError(t, err)
}
// assertError verifies that err is non-nil.
func assertError(t *testing.T, err error) {
	if err != nil {
		return
	}
	t.Error("expected an error but received nil")
}
// Ill-formed commitment (one byte too long) should produce an error
func TestOpenOnLongCommitment(t *testing.T) {
	tooLong := make([]byte, Size+1)
	_, err := Open(tooLong, Witness{})
	assertError(t, err)
}
// Ill-formed commitment (one byte too short) should produce an error
func TestOpenOnShortCommitment(t *testing.T) {
	tooShort := make([]byte, Size-1)
	_, err := Open(tooShort, Witness{})
	assertError(t, err)
}
// Tests that marshal-unmarshal of a Witness is the identity function
func TestWitnessMarshalRoundTrip(t *testing.T) {
	expected := &Witness{
		[]byte("I'm the dude. So that's what you call me"),
		[Size]byte{0xAC},
	}
	// Marshal and test
	jsonBytes, err := json.Marshal(expected)
	require.NoError(t, err)
	require.NotNil(t, jsonBytes)
	// Unmarshal and test
	actual := &Witness{}
	require.NoError(t, json.Unmarshal(jsonBytes, actual))
	require.Equal(t, expected.Msg, actual.Msg)
	require.Equal(t, expected.r, actual.r)
}
// Tests that marshal-unmarshal of a Commitment is the identity function
func TestCommitmentMarshalRoundTrip(t *testing.T) {
	expected := Commitment([]byte("That or uh his-dudeness or duder or el duderino."))
	// Marshal and test
	jsonBytes, err := json.Marshal(expected)
	require.NoError(t, err)
	require.NotNil(t, jsonBytes)
	// Unmarshal and test
	actual := Commitment{}
	require.NoError(t, json.Unmarshal(jsonBytes, &actual))
	require.Equal(t, []byte(expected), []byte(actual))
}

View File

@ -0,0 +1,3 @@
# Curves
The curves package contains implementations of various elliptic curves.

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,517 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
crand "crypto/rand"
"math/big"
"testing"
bls12377 "github.com/consensys/gnark-crypto/ecc/bls12-377"
"github.com/stretchr/testify/require"
)
// TestScalarBls12377G1Random pins the scalar produced from the deterministic
// testRng stream, then smoke-tests sampling from crypto/rand.
func TestScalarBls12377G1Random(t *testing.T) {
	bls12377g1 := BLS12377G1()
	sc := bls12377g1.Scalar.Random(testRng())
	s, ok := sc.(*ScalarBls12377)
	require.True(t, ok)
	expected, _ := new(big.Int).SetString("022a7db6fad5d5ff49108230818187de316bd0b3e5e96f190397bbb9f28e7a8b", 16)
	require.Equal(t, s.value, expected)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc := bls12377g1.Scalar.Random(crand.Reader)
		_, ok := sc.(*ScalarBls12377)
		require.True(t, ok)
		require.True(t, !sc.IsZero())
	}
}

// TestScalarBls12377G1Hash pins the scalar derived by hashing 32 zero bytes.
func TestScalarBls12377G1Hash(t *testing.T) {
	var b [32]byte
	bls12377G1 := BLS12377G1()
	sc := bls12377G1.Scalar.Hash(b[:])
	s, ok := sc.(*ScalarBls12377)
	require.True(t, ok)
	expected, _ := new(big.Int).SetString("0c043edae82bf279180b9353139711c1fda5fa64a1f085b80760edaee8f0baca", 16)
	require.Equal(t, s.value, expected)
}
// The zero scalar is the additive identity and is even.
func TestScalarBls12377G1Zero(t *testing.T) {
	bls12377G1 := BLS12377G1()
	sc := bls12377G1.Scalar.Zero()
	require.True(t, sc.IsZero())
	require.True(t, sc.IsEven())
}

// The one scalar is the multiplicative identity and is odd.
func TestScalarBls12377G1One(t *testing.T) {
	bls12377G1 := BLS12377G1()
	sc := bls12377G1.Scalar.One()
	require.True(t, sc.IsOne())
	require.True(t, sc.IsOdd())
}

// New reduces mod the (odd) group order, so -1 maps to r-1 (even)
// and -2 maps to r-2 (odd) — hence the inverted parity expectations.
func TestScalarBls12377G1New(t *testing.T) {
	bls12377G1 := BLS12377G1()
	three := bls12377G1.Scalar.New(3)
	require.True(t, three.IsOdd())
	four := bls12377G1.Scalar.New(4)
	require.True(t, four.IsEven())
	neg1 := bls12377G1.Scalar.New(-1)
	require.True(t, neg1.IsEven())
	neg2 := bls12377G1.Scalar.New(-2)
	require.True(t, neg2.IsOdd())
}
// 3^2 == 9
func TestScalarBls12377G1Square(t *testing.T) {
	bls12377G1 := BLS12377G1()
	three := bls12377G1.Scalar.New(3)
	nine := bls12377G1.Scalar.New(9)
	require.Equal(t, three.Square().Cmp(nine), 0)
}

// 3^3 == 27
func TestScalarBls12377G1Cube(t *testing.T) {
	bls12377G1 := BLS12377G1()
	three := bls12377G1.Scalar.New(3)
	twentySeven := bls12377G1.Scalar.New(27)
	require.Equal(t, three.Cube().Cmp(twentySeven), 0)
}

// 2*3 == 6
func TestScalarBls12377G1Double(t *testing.T) {
	bls12377G1 := BLS12377G1()
	three := bls12377G1.Scalar.New(3)
	six := bls12377G1.Scalar.New(6)
	require.Equal(t, three.Double().Cmp(six), 0)
}

// Negation agrees with New on negative inputs.
func TestScalarBls12377G1Neg(t *testing.T) {
	bls12377G1 := BLS12377G1()
	one := bls12377G1.Scalar.One()
	neg1 := bls12377G1.Scalar.New(-1)
	require.Equal(t, one.Neg().Cmp(neg1), 0)
	lotsOfThrees := bls12377G1.Scalar.New(333333)
	expected := bls12377G1.Scalar.New(-333333)
	require.Equal(t, lotsOfThrees.Neg().Cmp(expected), 0)
}

// 9^-1 mod r matches the pinned constant.
func TestScalarBls12377G1Invert(t *testing.T) {
	bls12377G1 := BLS12377G1()
	nine := bls12377G1.Scalar.New(9)
	actual, _ := nine.Invert()
	sa, _ := actual.(*ScalarBls12377)
	expected, err := bls12377G1.Scalar.SetBigInt(bhex("0a5f38510051b12ffcd5f1f46c1ef000c0095e8d9000000093d0d55555555556"))
	require.NoError(t, err)
	require.Equal(t, sa.Cmp(expected), 0)
}
// sqrt(9) matches the pinned constant (a square root of 9 mod r; note the
// library may return either root, so the expected value pins its choice).
func TestScalarBls12377G1Sqrt(t *testing.T) {
	bls12377G1 := BLS12377G1()
	nine := bls12377G1.Scalar.New(9)
	actual, err := nine.Sqrt()
	require.NoError(t, err)
	sa, _ := actual.(*ScalarBls12377)
	expected, err := bls12377G1.Scalar.SetBigInt(bhex("12ab655e9a2ca55660b44d1e5c37b00159aa76fed00000010a117ffffffffffe"))
	require.NoError(t, err)
	require.Equal(t, sa.Cmp(expected), 0)
}

// Addition, including wrap-around: (r-3) + 9 == 6 mod r.
func TestScalarBls12377G1Add(t *testing.T) {
	bls12377G1 := BLS12377G1()
	nine := bls12377G1.Scalar.New(9)
	six := bls12377G1.Scalar.New(6)
	fifteen := nine.Add(six)
	require.NotNil(t, fifteen)
	expected := bls12377G1.Scalar.New(15)
	require.Equal(t, expected.Cmp(fifteen), 0)
	n := new(big.Int).Set(bls12377modulus)
	n.Sub(n, big.NewInt(3))
	upper, err := bls12377G1.Scalar.SetBigInt(n)
	require.NoError(t, err)
	actual := upper.Add(nine)
	require.NotNil(t, actual)
	require.Equal(t, actual.Cmp(six), 0)
}

// Subtraction, including underflow: 6 - 9 == r-3 mod r.
func TestScalarBls12377G1Sub(t *testing.T) {
	bls12377G1 := BLS12377G1()
	nine := bls12377G1.Scalar.New(9)
	six := bls12377G1.Scalar.New(6)
	n := new(big.Int).Set(bls12377modulus)
	n.Sub(n, big.NewInt(3))
	expected, err := bls12377G1.Scalar.SetBigInt(n)
	require.NoError(t, err)
	actual := six.Sub(nine)
	require.Equal(t, expected.Cmp(actual), 0)
	actual = nine.Sub(six)
	require.Equal(t, actual.Cmp(bls12377G1.Scalar.New(3)), 0)
}

// Multiplication, including (r-1)*(r-1) == 1 mod r.
func TestScalarBls12377G1Mul(t *testing.T) {
	bls12377G1 := BLS12377G1()
	nine := bls12377G1.Scalar.New(9)
	six := bls12377G1.Scalar.New(6)
	actual := nine.Mul(six)
	require.Equal(t, actual.Cmp(bls12377G1.Scalar.New(54)), 0)
	n := new(big.Int).Set(bls12377modulus)
	n.Sub(n, big.NewInt(1))
	upper, err := bls12377G1.Scalar.SetBigInt(n)
	require.NoError(t, err)
	require.Equal(t, upper.Mul(upper).Cmp(bls12377G1.Scalar.New(1)), 0)
}

// Division: x/x == 1 and 54/9 == 6.
func TestScalarBls12377G1Div(t *testing.T) {
	bls12377G1 := BLS12377G1()
	nine := bls12377G1.Scalar.New(9)
	actual := nine.Div(nine)
	require.Equal(t, actual.Cmp(bls12377G1.Scalar.New(1)), 0)
	require.Equal(t, bls12377G1.Scalar.New(54).Div(nine).Cmp(bls12377G1.Scalar.New(6)), 0)
}
// Scalars serialize to 32 big-endian bytes and round-trip through SetBytes.
func TestScalarBls12377G1Serialize(t *testing.T) {
	bls12377G1 := BLS12377G1()
	sc := bls12377G1.Scalar.New(255)
	sequence := sc.Bytes()
	require.Equal(t, len(sequence), 32)
	// 255 == 0xff in the last (least-significant) byte: big-endian encoding
	require.Equal(t, sequence, []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff})
	ret, err := bls12377G1.Scalar.SetBytes(sequence)
	require.NoError(t, err)
	require.Equal(t, ret.Cmp(sc), 0)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc = bls12377G1.Scalar.Random(crand.Reader)
		sequence = sc.Bytes()
		require.Equal(t, len(sequence), 32)
		ret, err = bls12377G1.Scalar.SetBytes(sequence)
		require.NoError(t, err)
		require.Equal(t, ret.Cmp(sc), 0)
	}
}
// Nil-argument handling: arithmetic with a nil operand returns nil, and
// Cmp(nil) returns the sentinel -2 (outside the usual -1/0/1 range).
func TestScalarBls12377G1Nil(t *testing.T) {
	bls12377G1 := BLS12377G1()
	one := bls12377G1.Scalar.New(1)
	require.Nil(t, one.Add(nil))
	require.Nil(t, one.Sub(nil))
	require.Nil(t, one.Mul(nil))
	require.Nil(t, one.Div(nil))
	require.Nil(t, bls12377G1.Scalar.Random(nil))
	require.Equal(t, one.Cmp(nil), -2)
	_, err := bls12377G1.Scalar.SetBigInt(nil)
	require.Error(t, err)
}

// Scalar.Point returns the matching point type for each group.
func TestScalarBls12377Point(t *testing.T) {
	bls12377G1 := BLS12377G1()
	_, ok := bls12377G1.Scalar.Point().(*PointBls12377G1)
	require.True(t, ok)
	bls12377G2 := BLS12377G2()
	_, ok = bls12377G2.Scalar.Point().(*PointBls12377G2)
	require.True(t, ok)
}
func TestPointBls12377G2Random(t *testing.T) {
bls12377G2 := BLS12377G2()
sc := bls12377G2.Point.Random(testRng())
s, ok := sc.(*PointBls12377G2)
require.True(t, ok)
expectedX, _ := new(big.Int).SetString("2deeb99988cc46605a5e8eeb50b2c52fc4a12b4537aa8a149431ca85bac2017a32d2a3bf8411d5145bdf587f162a1b01a106e89ebf3210c0926ba07681cd84fc8ae2409b396b24730a8b851d05ba3293b82ae341c472d626c1f55da16ba46d", 16)
expectedY, _ := new(big.Int).SetString("b17be752bc4a8ff05824fcf974d232cebe07ee333ce879bf8c7b88ce18813cb190e8a45eddbd7cc5a4b68993ed17770094ab97b85b70b0b80e89c854336b85e46c7259070fb6606b03bcab12d96438f9a79353fafe11733aed51bfa4e798b8", 16)
require.Equal(t, s.X(), expectedX)
require.Equal(t, s.Y(), expectedY)
// Try 10 random values
for i := 0; i < 10; i++ {
sc := bls12377G2.Point.Random(crand.Reader)
_, ok := sc.(*PointBls12377G2)
require.True(t, ok)
require.True(t, !sc.IsIdentity())
}
}
func TestPointBls12377G2Hash(t *testing.T) {
var b [32]byte
bls12377G2 := BLS12377G2()
sc := bls12377G2.Point.Hash(b[:])
s, ok := sc.(*PointBls12377G2)
require.True(t, ok)
expectedX, _ := new(big.Int).SetString("014eec1848d84be62f3a5778353ea6c2b0db859508bc40ff2c1387f0a4b2a167fedbe6b10f946f33c600623d7b96dc8200ef8b67c1e07c4dc522f25deb617ad8251199d235da8bc7700332c8416aa204f81e6bebd914e46acea095d3083b7723", 16)
expectedY, _ := new(big.Int).SetString("015c17fb5e37ce1284fa5f10cca9a55be5a5e4d821649294ab820a6f044f55337665df04a940ee7f5d937aff69196b010168d9090eb791d4b21752622f1fd5fb0f4c44bfd83e2cf6d332b02343999fac3de660ca84aff40b428f25b5378fe648", 16)
require.Equal(t, s.X(), expectedX)
require.Equal(t, s.Y(), expectedY)
}
func TestPointBls12377G2Identity(t *testing.T) {
bls12377G2 := BLS12377G2()
sc := bls12377G2.Point.Identity()
require.True(t, sc.IsIdentity())
require.Equal(t, sc.ToAffineCompressed(), []byte{0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0})
}
// The curve's generator must equal gnark-crypto's canonical G2 generator.
func TestPointBls12377G2Generator(t *testing.T) {
	bls12377G2 := BLS12377G2()
	sc := bls12377G2.Point.Generator()
	s, ok := sc.(*PointBls12377G2)
	require.True(t, ok)
	_, _, _, g2Aff := bls12377.Generators()
	require.True(t, s.value.Equal(&g2Aff))
}

// Set(0, 0) yields the identity; Set on generator coordinates succeeds.
func TestPointBls12377G2Set(t *testing.T) {
	bls12377G2 := BLS12377G2()
	iden, err := bls12377G2.Point.Set(big.NewInt(0), big.NewInt(0))
	require.NoError(t, err)
	require.True(t, iden.IsIdentity())
	_, _, _, g2Aff := bls12377.Generators()
	// NOTE(review): g2Aff.Bytes() is the 96-byte *compressed* encoding, so
	// generator[96:] is empty and y is passed as 0 here; RawBytes() (192B,
	// x||y) looks like what was intended — confirm against the Set contract.
	generator := g2Aff.Bytes()
	_, err = bls12377G2.Point.Set(new(big.Int).SetBytes(generator[:96]), new(big.Int).SetBytes(generator[96:]))
	require.NoError(t, err)
}
// Doubling agrees with scalar multiplication by 2; identity doubles to itself.
func TestPointBls12377G2Double(t *testing.T) {
	bls12377G2 := BLS12377G2()
	g := bls12377G2.Point.Generator()
	gg2 := g.Double()
	require.True(t, gg2.Equal(g.Mul(bls12377G2.Scalar.New(2))))
	i := bls12377G2.Point.Identity()
	require.True(t, i.Double().Equal(i))
}
func TestPointBls12377G2Neg(t *testing.T) {
bls12377G2 := BLS12377G1()
g := bls12377G2.Point.Generator().Neg()
require.True(t, g.Neg().Equal(bls12377G2.Point.Generator()))
require.True(t, bls12377G2.Point.Identity().Neg().Equal(bls12377G2.Point.Identity()))
}
// P+P == 2P and 3P == P+P+P.
func TestPointBls12377G2Add(t *testing.T) {
	bls12377G2 := BLS12377G2()
	pt := bls12377G2.Point.Generator()
	require.True(t, pt.Add(pt).Equal(pt.Double()))
	require.True(t, pt.Mul(bls12377G2.Scalar.New(3)).Equal(pt.Add(pt).Add(pt)))
}

// 4G - 3G == G and 4G - 4G == O.
func TestPointBls12377G2Sub(t *testing.T) {
	bls12377G2 := BLS12377G2()
	g := bls12377G2.Point.Generator()
	pt := bls12377G2.Point.Generator().Mul(bls12377G2.Scalar.New(4))
	require.True(t, pt.Sub(g).Sub(g).Sub(g).Equal(g))
	require.True(t, pt.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity())
}

// 4G == double(double(G)).
func TestPointBls12377G2Mul(t *testing.T) {
	bls12377G2 := BLS12377G2()
	g := bls12377G2.Point.Generator()
	pt := bls12377G2.Point.Generator().Mul(bls12377G2.Scalar.New(4))
	require.True(t, g.Double().Double().Equal(pt))
}
func TestPointBls12377G2Serialize(t *testing.T) {
bls12377G2 := BLS12377G2()
ss := bls12377G2.Scalar.Random(testRng())
g := bls12377G2.Point.Generator()
ppt := g.Mul(ss)
require.Equal(t, ppt.ToAffineCompressed(), []byte{0x81, 0x88, 0xf4, 0x32, 0xec, 0x60, 0x72, 0xd3, 0x76, 0x77, 0x86, 0xcd, 0x44, 0xce, 0x91, 0x5a, 0x3a, 0xb5, 0x13, 0xe2, 0x81, 0x10, 0x41, 0xa1, 0x39, 0x1e, 0xe2, 0x8a, 0x5f, 0x5f, 0xf1, 0x2e, 0x72, 0xab, 0xc5, 0x62, 0x5c, 0x99, 0x56, 0x5d, 0xd4, 0x33, 0x9a, 0x61, 0x63, 0xd4, 0x8e, 0x7c, 0x0, 0xec, 0x46, 0xb, 0xc4, 0x22, 0xd9, 0xe0, 0x74, 0xe6, 0x79, 0x7b, 0x55, 0x8d, 0x8f, 0x9b, 0xf7, 0x59, 0x65, 0x10, 0x97, 0xe3, 0x12, 0x18, 0xd3, 0x76, 0x3, 0x58, 0x87, 0xc7, 0x82, 0x4c, 0x42, 0x80, 0xa2, 0xa, 0x3d, 0x66, 0xfe, 0xb6, 0xed, 0xd9, 0x38, 0x45, 0x5, 0xbe, 0x40, 0x32})
require.Equal(t, ppt.ToAffineUncompressed(), []byte{0x1, 0x88, 0xf4, 0x32, 0xec, 0x60, 0x72, 0xd3, 0x76, 0x77, 0x86, 0xcd, 0x44, 0xce, 0x91, 0x5a, 0x3a, 0xb5, 0x13, 0xe2, 0x81, 0x10, 0x41, 0xa1, 0x39, 0x1e, 0xe2, 0x8a, 0x5f, 0x5f, 0xf1, 0x2e, 0x72, 0xab, 0xc5, 0x62, 0x5c, 0x99, 0x56, 0x5d, 0xd4, 0x33, 0x9a, 0x61, 0x63, 0xd4, 0x8e, 0x7c, 0x0, 0xec, 0x46, 0xb, 0xc4, 0x22, 0xd9, 0xe0, 0x74, 0xe6, 0x79, 0x7b, 0x55, 0x8d, 0x8f, 0x9b, 0xf7, 0x59, 0x65, 0x10, 0x97, 0xe3, 0x12, 0x18, 0xd3, 0x76, 0x3, 0x58, 0x87, 0xc7, 0x82, 0x4c, 0x42, 0x80, 0xa2, 0xa, 0x3d, 0x66, 0xfe, 0xb6, 0xed, 0xd9, 0x38, 0x45, 0x5, 0xbe, 0x40, 0x32, 0x0, 0xd, 0x69, 0x94, 0x48, 0x5e, 0x3, 0xd4, 0x51, 0x2a, 0xf6, 0xa, 0xf0, 0x4b, 0xd8, 0x42, 0xc4, 0xc3, 0x66, 0xb8, 0x77, 0x15, 0xaf, 0x8b, 0xee, 0x68, 0xc3, 0xfe, 0x16, 0x4d, 0xd0, 0x5b, 0x97, 0xc, 0x16, 0x6c, 0xfe, 0x9e, 0xc, 0xe0, 0xe3, 0x15, 0x33, 0x6c, 0x81, 0xc1, 0x93, 0x1e, 0x1, 0x13, 0xfc, 0x17, 0xf2, 0x9c, 0xe0, 0x61, 0xe4, 0x58, 0x3a, 0xba, 0xed, 0xd9, 0x2f, 0x54, 0xdd, 0xc3, 0x7f, 0xdf, 0xc0, 0x31, 0x89, 0x1f, 0xf3, 0xcf, 0x9c, 0xac, 0x7c, 0xd, 0x91, 0x8a, 0x84, 0xf8, 0xab, 0xcc, 0x77, 0x55, 0xb6, 0x72, 0xf4, 0xb0, 0x13, 0x45, 0xbb, 0x3d, 0x44, 0xfe})
retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed())
require.NoError(t, err)
require.True(t, ppt.Equal(retP))
retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed())
require.NoError(t, err)
require.True(t, ppt.Equal(retP))
// smoke test
for i := 0; i < 25; i++ {
s := bls12377G2.Scalar.Random(crand.Reader)
pt := g.Mul(s)
cmprs := pt.ToAffineCompressed()
require.Equal(t, len(cmprs), 96)
retC, err := pt.FromAffineCompressed(cmprs)
require.NoError(t, err)
require.True(t, pt.Equal(retC))
un := pt.ToAffineUncompressed()
require.Equal(t, len(un), 192)
retU, err := pt.FromAffineUncompressed(un)
require.NoError(t, err)
require.True(t, pt.Equal(retU))
}
}
// Nil-argument handling for G2 points: arithmetic with nil returns nil,
// Equal(nil) is false, and SetBigInt(nil) errors.
func TestPointBls12377G2Nil(t *testing.T) {
	bls12377G2 := BLS12377G2()
	one := bls12377G2.Point.Generator()
	require.Nil(t, one.Add(nil))
	require.Nil(t, one.Sub(nil))
	require.Nil(t, one.Mul(nil))
	require.Nil(t, bls12377G2.Scalar.Random(nil))
	require.False(t, one.Equal(nil))
	_, err := bls12377G2.Scalar.SetBigInt(nil)
	require.Error(t, err)
}
func TestPointBls12377G1Random(t *testing.T) {
bls12377G1 := BLS12377G1()
sc := bls12377G1.Point.Random(testRng())
s, ok := sc.(*PointBls12377G1)
require.True(t, ok)
expectedX, _ := new(big.Int).SetString("facd83174df271a2dbd7d84f02f4d1b6a61850a926e7ec5ca34e558378feb146231e5e105fa27310843db23a49ca53", 16)
expectedY, _ := new(big.Int).SetString("4fa90bd4c90a2d4afd01bf0f561ab112bc13bb7c0faa87a2324febab2c3fa1ff47b2ed1dd9e38b1c660dd6d2ec0a7b", 16)
require.Equal(t, s.X(), expectedX)
require.Equal(t, s.Y(), expectedY)
// Try 10 random values
for i := 0; i < 10; i++ {
sc := bls12377G1.Point.Random(crand.Reader)
_, ok := sc.(*PointBls12377G1)
require.True(t, ok)
require.True(t, !sc.IsIdentity())
}
}
func TestPointBls12377G1Hash(t *testing.T) {
var b [32]byte
bls12377G1 := BLS12377G1()
sc := bls12377G1.Point.Hash(b[:])
s, ok := sc.(*PointBls12377G1)
require.True(t, ok)
expectedX, _ := new(big.Int).SetString("8c1f4dd215430f2a1c01e1f50eded8de37033e5b70b9987c93547e0b8ec87ca918039d41e5e634773e1bcbe1e2d836", 16)
expectedY, _ := new(big.Int).SetString("552b0bde9c7b051118a5619cf409cd9d2b25a1ebb5e35b7c7bd031f8c15f1d08979e634d2acd1b7be4ccb43a064393", 16)
require.Equal(t, s.X(), expectedX)
require.Equal(t, s.Y(), expectedY)
}
func TestPointBls12377G1Identity(t *testing.T) {
bls12377G1 := BLS12377G1()
sc := bls12377G1.Point.Identity()
require.True(t, sc.IsIdentity())
require.Equal(t, sc.ToAffineCompressed(), []byte{0xc0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
}
// The curve's generator must equal gnark-crypto's canonical G1 generator.
func TestPointBls12377G1Generator(t *testing.T) {
	bls12377G1 := BLS12377G1()
	sc := bls12377G1.Point.Generator()
	s, ok := sc.(*PointBls12377G1)
	require.True(t, ok)
	_, _, g1Aff, _ := bls12377.Generators()
	require.True(t, s.value.Equal(&g1Aff))
}

// Set(0, 0) yields the identity; Set on generator coordinates succeeds.
func TestPointBls12377G1Set(t *testing.T) {
	bls12377G1 := BLS12377G1()
	iden, err := bls12377G1.Point.Set(big.NewInt(0), big.NewInt(0))
	require.NoError(t, err)
	require.True(t, iden.IsIdentity())
	_, _, g1Aff, _ := bls12377.Generators()
	// NOTE(review): g1Aff.Bytes() is the 48-byte *compressed* encoding, so
	// generator[48:] is empty and y is passed as 0 here; RawBytes() (96B,
	// x||y) looks like what was intended — confirm against the Set contract.
	generator := g1Aff.Bytes()
	_, err = bls12377G1.Point.Set(new(big.Int).SetBytes(generator[:48]), new(big.Int).SetBytes(generator[48:]))
	require.NoError(t, err)
}
// Doubling agrees with scalar multiplication by 2; identity doubles to itself.
func TestPointBls12377G1Double(t *testing.T) {
	bls12377G1 := BLS12377G1()
	g := bls12377G1.Point.Generator()
	g2 := g.Double()
	require.True(t, g2.Equal(g.Mul(bls12377G1.Scalar.New(2))))
	i := bls12377G1.Point.Identity()
	require.True(t, i.Double().Equal(i))
}

// Negation is an involution: -(-P) == P, and -O == O.
func TestPointBls12377G1Neg(t *testing.T) {
	bls12377G1 := BLS12377G1()
	g := bls12377G1.Point.Generator().Neg()
	require.True(t, g.Neg().Equal(bls12377G1.Point.Generator()))
	require.True(t, bls12377G1.Point.Identity().Neg().Equal(bls12377G1.Point.Identity()))
}

// P+P == 2P and 3P == P+P+P.
func TestPointBls12377G1Add(t *testing.T) {
	bls12377G1 := BLS12377G1()
	pt := bls12377G1.Point.Generator()
	require.True(t, pt.Add(pt).Equal(pt.Double()))
	require.True(t, pt.Mul(bls12377G1.Scalar.New(3)).Equal(pt.Add(pt).Add(pt)))
}

// 4G - 3G == G and 4G - 4G == O.
func TestPointBls12377G1Sub(t *testing.T) {
	bls12377G1 := BLS12377G1()
	g := bls12377G1.Point.Generator()
	pt := bls12377G1.Point.Generator().Mul(bls12377G1.Scalar.New(4))
	require.True(t, pt.Sub(g).Sub(g).Sub(g).Equal(g))
	require.True(t, pt.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity())
}

// 4G == double(double(G)).
func TestPointBls12377G1Mul(t *testing.T) {
	bls12377G1 := BLS12377G1()
	g := bls12377G1.Point.Generator()
	pt := bls12377G1.Point.Generator().Mul(bls12377G1.Scalar.New(4))
	require.True(t, g.Double().Double().Equal(pt))
}
func TestPointBls12377G1Serialize(t *testing.T) {
bls12377G1 := BLS12377G1()
ss := bls12377G1.Scalar.Random(testRng())
g := bls12377G1.Point.Generator()
ppt := g.Mul(ss)
require.Equal(t, ppt.ToAffineCompressed(), []byte{0xa0, 0xd0, 0xae, 0xea, 0xaa, 0xf1, 0xf6, 0x0, 0x59, 0x39, 0x33, 0x3c, 0x60, 0x16, 0xaf, 0x68, 0x86, 0x2d, 0x3a, 0xc1, 0x73, 0x24, 0xdd, 0x2, 0xb6, 0x49, 0xde, 0xf, 0xe7, 0x42, 0xe8, 0x10, 0xf, 0xab, 0xd1, 0x63, 0xed, 0x13, 0xda, 0x0, 0x69, 0x1b, 0x20, 0x7d, 0xcd, 0x71, 0x7, 0xef})
require.Equal(t, ppt.ToAffineUncompressed(), []byte{0x0, 0xd0, 0xae, 0xea, 0xaa, 0xf1, 0xf6, 0x0, 0x59, 0x39, 0x33, 0x3c, 0x60, 0x16, 0xaf, 0x68, 0x86, 0x2d, 0x3a, 0xc1, 0x73, 0x24, 0xdd, 0x2, 0xb6, 0x49, 0xde, 0xf, 0xe7, 0x42, 0xe8, 0x10, 0xf, 0xab, 0xd1, 0x63, 0xed, 0x13, 0xda, 0x0, 0x69, 0x1b, 0x20, 0x7d, 0xcd, 0x71, 0x7, 0xef, 0x1, 0x78, 0x3f, 0xbd, 0xd4, 0xbd, 0x7c, 0xf5, 0x7a, 0xfd, 0x33, 0x45, 0x7, 0x39, 0xf2, 0xb7, 0x10, 0x4c, 0x1e, 0xc5, 0x2b, 0x93, 0x4, 0x67, 0x54, 0x88, 0x8b, 0x57, 0x69, 0xf, 0x74, 0x40, 0xf4, 0x58, 0x5b, 0xd7, 0x76, 0x63, 0x58, 0xd9, 0x7b, 0x6d, 0x46, 0x8e, 0x50, 0x89, 0xc4, 0x7d})
retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed())
require.NoError(t, err)
require.True(t, ppt.Equal(retP))
retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed())
require.NoError(t, err)
require.True(t, ppt.Equal(retP))
// smoke test
for i := 0; i < 25; i++ {
s := bls12377G1.Scalar.Random(crand.Reader)
pt := g.Mul(s)
cmprs := pt.ToAffineCompressed()
require.Equal(t, len(cmprs), 48)
retC, err := pt.FromAffineCompressed(cmprs)
require.NoError(t, err)
require.True(t, pt.Equal(retC))
un := pt.ToAffineUncompressed()
require.Equal(t, len(un), 96)
retU, err := pt.FromAffineUncompressed(un)
require.NoError(t, err)
require.True(t, pt.Equal(retU))
}
}
// Nil-argument handling for G1 points: arithmetic with nil returns nil,
// Equal(nil) is false, and SetBigInt(nil) errors.
func TestPointBls12377G1Nil(t *testing.T) {
	bls12377G1 := BLS12377G1()
	one := bls12377G1.Point.Generator()
	require.Nil(t, one.Add(nil))
	require.Nil(t, one.Sub(nil))
	require.Nil(t, one.Mul(nil))
	require.Nil(t, bls12377G1.Scalar.Random(nil))
	require.False(t, one.Equal(nil))
	_, err := bls12377G1.Scalar.SetBigInt(nil)
	require.Error(t, err)
}

// SumOfProducts(G..G, [8,9,10,11,12]) == 50*G, since 8+9+10+11+12 == 50.
func TestPointBls12377G1SumOfProducts(t *testing.T) {
	lhs := new(PointBls12377G1).Generator().Mul(new(ScalarBls12377).New(50))
	points := make([]Point, 5)
	for i := range points {
		points[i] = new(PointBls12377G1).Generator()
	}
	scalars := []Scalar{
		new(ScalarBls12377).New(8),
		new(ScalarBls12377).New(9),
		new(ScalarBls12377).New(10),
		new(ScalarBls12377).New(11),
		new(ScalarBls12377).New(12),
	}
	rhs := lhs.SumOfProducts(points, scalars)
	require.NotNil(t, rhs)
	require.True(t, lhs.Equal(rhs))
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,517 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
crand "crypto/rand"
"math/big"
"testing"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves/native/bls12381"
)
// TestScalarBls12381G1Random pins the scalar produced from the deterministic
// testRng stream, then smoke-tests sampling from crypto/rand.
func TestScalarBls12381G1Random(t *testing.T) {
	bls12381g1 := BLS12381G1()
	sc := bls12381g1.Scalar.Random(testRng())
	s, ok := sc.(*ScalarBls12381)
	require.True(t, ok)
	expected, _ := new(big.Int).SetString("1208bca85f538782d3941c7e805b239d181247a3c0ab58db6b1c8848804df8c8", 16)
	require.Equal(t, s.Value.BigInt(), expected)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc := bls12381g1.Scalar.Random(crand.Reader)
		_, ok := sc.(*ScalarBls12381)
		require.True(t, ok)
		require.True(t, !sc.IsZero())
	}
}

// TestScalarBls12381G1Hash pins the scalar derived by hashing 32 zero bytes.
func TestScalarBls12381G1Hash(t *testing.T) {
	var b [32]byte
	bls12381G1 := BLS12381G1()
	sc := bls12381G1.Scalar.Hash(b[:])
	s, ok := sc.(*ScalarBls12381)
	require.True(t, ok)
	expected, _ := new(big.Int).SetString("07ec86a2ab79613fc0294e058151ddc74db38b0cde95a4678eb91f1258f31b40", 16)
	require.Equal(t, s.Value.BigInt(), expected)
}
// The zero scalar is the additive identity and is even.
func TestScalarBls12381G1Zero(t *testing.T) {
	bls12381G1 := BLS12381G1()
	sc := bls12381G1.Scalar.Zero()
	require.True(t, sc.IsZero())
	require.True(t, sc.IsEven())
}

// The one scalar is the multiplicative identity and is odd.
func TestScalarBls12381G1One(t *testing.T) {
	bls12381G1 := BLS12381G1()
	sc := bls12381G1.Scalar.One()
	require.True(t, sc.IsOne())
	require.True(t, sc.IsOdd())
}

// New reduces mod the (odd) group order, so -1 maps to q-1 (even)
// and -2 maps to q-2 (odd) — hence the inverted parity expectations.
func TestScalarBls12381G1New(t *testing.T) {
	bls12381G1 := BLS12381G1()
	three := bls12381G1.Scalar.New(3)
	require.True(t, three.IsOdd())
	four := bls12381G1.Scalar.New(4)
	require.True(t, four.IsEven())
	neg1 := bls12381G1.Scalar.New(-1)
	require.True(t, neg1.IsEven())
	neg2 := bls12381G1.Scalar.New(-2)
	require.True(t, neg2.IsOdd())
}
// 3^2 == 9
func TestScalarBls12381G1Square(t *testing.T) {
	bls12381G1 := BLS12381G1()
	three := bls12381G1.Scalar.New(3)
	nine := bls12381G1.Scalar.New(9)
	require.Equal(t, three.Square().Cmp(nine), 0)
}

// 3^3 == 27
func TestScalarBls12381G1Cube(t *testing.T) {
	bls12381G1 := BLS12381G1()
	three := bls12381G1.Scalar.New(3)
	twentySeven := bls12381G1.Scalar.New(27)
	require.Equal(t, three.Cube().Cmp(twentySeven), 0)
}

// 2*3 == 6
func TestScalarBls12381G1Double(t *testing.T) {
	bls12381G1 := BLS12381G1()
	three := bls12381G1.Scalar.New(3)
	six := bls12381G1.Scalar.New(6)
	require.Equal(t, three.Double().Cmp(six), 0)
}

// Negation agrees with New on negative inputs.
func TestScalarBls12381G1Neg(t *testing.T) {
	bls12381G1 := BLS12381G1()
	one := bls12381G1.Scalar.One()
	neg1 := bls12381G1.Scalar.New(-1)
	require.Equal(t, one.Neg().Cmp(neg1), 0)
	lotsOfThrees := bls12381G1.Scalar.New(333333)
	expected := bls12381G1.Scalar.New(-333333)
	require.Equal(t, lotsOfThrees.Neg().Cmp(expected), 0)
}

// 9^-1 mod q matches the pinned constant.
func TestScalarBls12381G1Invert(t *testing.T) {
	bls12381G1 := BLS12381G1()
	nine := bls12381G1.Scalar.New(9)
	actual, _ := nine.Invert()
	sa, _ := actual.(*ScalarBls12381)
	expected, err := bls12381G1.Scalar.SetBigInt(bhex("19c308bd25b13848eef068e557794c72f62a247271c6bf1c38e38e38aaaaaaab"))
	require.NoError(t, err)
	require.Equal(t, sa.Cmp(expected), 0)
}

// sqrt(9) matches the pinned constant (the library's chosen root).
func TestScalarBls12381G1Sqrt(t *testing.T) {
	bls12381G1 := BLS12381G1()
	nine := bls12381G1.Scalar.New(9)
	actual, err := nine.Sqrt()
	require.NoError(t, err)
	sa, _ := actual.(*ScalarBls12381)
	expected, err := bls12381G1.Scalar.SetBigInt(bhex("73eda753299d7d483339d80809a1d80553bda402fffe5bfefffffffefffffffe"))
	require.NoError(t, err)
	require.Equal(t, sa.Cmp(expected), 0)
}
// Addition, including wrap-around: (q-3) + 9 == 6 mod q.
func TestScalarBls12381G1Add(t *testing.T) {
	bls12381G1 := BLS12381G1()
	nine := bls12381G1.Scalar.New(9)
	six := bls12381G1.Scalar.New(6)
	fifteen := nine.Add(six)
	require.NotNil(t, fifteen)
	expected := bls12381G1.Scalar.New(15)
	require.Equal(t, expected.Cmp(fifteen), 0)
	qq := bls12381.Bls12381FqNew()
	n := new(big.Int).Set(qq.Params.BiModulus)
	n.Sub(n, big.NewInt(3))
	upper, err := bls12381G1.Scalar.SetBigInt(n)
	require.NoError(t, err)
	actual := upper.Add(nine)
	require.NotNil(t, actual)
	require.Equal(t, actual.Cmp(six), 0)
}

// Subtraction, including underflow: 6 - 9 == q-3 mod q.
func TestScalarBls12381G1Sub(t *testing.T) {
	bls12381G1 := BLS12381G1()
	nine := bls12381G1.Scalar.New(9)
	six := bls12381G1.Scalar.New(6)
	qq := bls12381.Bls12381FqNew()
	n := new(big.Int).Set(qq.Params.BiModulus)
	n.Sub(n, big.NewInt(3))
	expected, err := bls12381G1.Scalar.SetBigInt(n)
	require.NoError(t, err)
	actual := six.Sub(nine)
	require.Equal(t, expected.Cmp(actual), 0)
	actual = nine.Sub(six)
	require.Equal(t, actual.Cmp(bls12381G1.Scalar.New(3)), 0)
}

// Multiplication, including (q-1)*(q-1) == 1 mod q.
func TestScalarBls12381G1Mul(t *testing.T) {
	bls12381G1 := BLS12381G1()
	nine := bls12381G1.Scalar.New(9)
	six := bls12381G1.Scalar.New(6)
	actual := nine.Mul(six)
	require.Equal(t, actual.Cmp(bls12381G1.Scalar.New(54)), 0)
	qq := bls12381.Bls12381FqNew()
	n := new(big.Int).Set(qq.Params.BiModulus)
	n.Sub(n, big.NewInt(1))
	upper, err := bls12381G1.Scalar.SetBigInt(n)
	require.NoError(t, err)
	require.Equal(t, upper.Mul(upper).Cmp(bls12381G1.Scalar.New(1)), 0)
}

// Division: x/x == 1 and 54/9 == 6.
func TestScalarBls12381G1Div(t *testing.T) {
	bls12381G1 := BLS12381G1()
	nine := bls12381G1.Scalar.New(9)
	actual := nine.Div(nine)
	require.Equal(t, actual.Cmp(bls12381G1.Scalar.New(1)), 0)
	require.Equal(t, bls12381G1.Scalar.New(54).Div(nine).Cmp(bls12381G1.Scalar.New(6)), 0)
}
// Scalars serialize to 32 big-endian bytes and round-trip through SetBytes.
func TestScalarBls12381G1Serialize(t *testing.T) {
	bls12381G1 := BLS12381G1()
	sc := bls12381G1.Scalar.New(255)
	sequence := sc.Bytes()
	require.Equal(t, len(sequence), 32)
	// 255 == 0xff in the last (least-significant) byte: big-endian encoding
	require.Equal(t, sequence, []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff})
	ret, err := bls12381G1.Scalar.SetBytes(sequence)
	require.NoError(t, err)
	require.Equal(t, ret.Cmp(sc), 0)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc = bls12381G1.Scalar.Random(crand.Reader)
		sequence = sc.Bytes()
		require.Equal(t, len(sequence), 32)
		ret, err = bls12381G1.Scalar.SetBytes(sequence)
		require.NoError(t, err)
		require.Equal(t, ret.Cmp(sc), 0)
	}
}
// TestScalarBls12381G1Nil checks that scalar operations reject nil arguments:
// arithmetic returns nil, Cmp reports -2 (different field), and SetBigInt errors.
func TestScalarBls12381G1Nil(t *testing.T) {
	curve := BLS12381G1()
	one := curve.Scalar.New(1)
	results := []Scalar{
		one.Add(nil),
		one.Sub(nil),
		one.Mul(nil),
		one.Div(nil),
		curve.Scalar.Random(nil),
	}
	for _, res := range results {
		require.Nil(t, res)
	}
	require.Equal(t, one.Cmp(nil), -2)
	_, err := curve.Scalar.SetBigInt(nil)
	require.Error(t, err)
}
// TestScalarBls12381Point checks that Scalar.Point returns a point of the
// matching group type for both G1 and G2.
func TestScalarBls12381Point(t *testing.T) {
	_, isG1 := BLS12381G1().Scalar.Point().(*PointBls12381G1)
	require.True(t, isG1)
	_, isG2 := BLS12381G2().Scalar.Point().(*PointBls12381G2)
	require.True(t, isG2)
}
// TestPointBls12381G2Random checks Point.Random: with the deterministic test
// RNG it must produce a fixed, known G2 point; with a real RNG, 10 sampled
// points must all be valid non-identity G2 points.
func TestPointBls12381G2Random(t *testing.T) {
	bls12381G2 := BLS12381G2()
	sc := bls12381G2.Point.Random(testRng())
	s, ok := sc.(*PointBls12381G2)
	require.True(t, ok)
	// Expected affine coordinates for the point derived from testRng's fixed stream.
	expectedX, _ := new(big.Int).SetString("13520facd10fc1cd71384d86b445b0e65ac1bf9205e86cd02837c064d1886b8aa3dc5348845bb06216601de5628315600967df84901b1c4f1fac87f9fc13d02f9c3a0f8cf462c86d2b4bbddf7b8520a3df2a5c541724a2c7ddc9eec45f0b2f74", 16)
	expectedY, _ := new(big.Int).SetString("0a46cb3d91222e4eb068e1eb41e7ef3efd1c705c1272476d74064541661736bf0910adcfe37fafbabf0989e0c9ae122b0ce11d941d60570a9b39ff332e09f9ba661a4aac019911032b1ddb0dee7ce5a34aebb8cb6f1fa21e5cf565d06dfc7b61", 16)
	require.Equal(t, s.X(), expectedX)
	require.Equal(t, s.Y(), expectedY)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc := bls12381G2.Point.Random(crand.Reader)
		_, ok := sc.(*PointBls12381G2)
		require.True(t, ok)
		require.True(t, !sc.IsIdentity())
	}
}
// TestPointBls12381G2Hash checks that hashing a fixed 32-byte zero input to
// G2 yields the expected, known point (hash-to-curve must be deterministic).
func TestPointBls12381G2Hash(t *testing.T) {
	var b [32]byte
	bls12381G2 := BLS12381G2()
	sc := bls12381G2.Point.Hash(b[:])
	s, ok := sc.(*PointBls12381G2)
	require.True(t, ok)
	expectedX, _ := new(big.Int).SetString("15060db402f549b74be2656006f369e0892a857cd7d16738761ad9ba01bf8da2d1e45f86c4f13fe0850ba1195a2e8aa91914cb4d8eac1e4582a45d92cd2c8fec3d34c11629503dafe60f7910e39eff6f6b6d41d881e9fb2b9857c06de7966077", 16)
	expectedY, _ := new(big.Int).SetString("0e509ea244e9f57d3a6f5140b39792424fb0889b5a3cad7f65d84cf9f3fccf64bec9ff45fc1f0c8fb7f045930336363217b27f340cd6f8bbf15fb1872a4e137c9655aad86672fa4d7e9973c39eec102069a36c632f7f90e6ec75b23dd6accafc", 16)
	require.Equal(t, s.X(), expectedX)
	require.Equal(t, s.Y(), expectedY)
}
// TestPointBls12381G2Identity checks the G2 identity point and its
// compressed encoding: 96 bytes with only the infinity+compression flag
// bits set in the first byte (0xc0).
func TestPointBls12381G2Identity(t *testing.T) {
	bls12381G2 := BLS12381G2()
	sc := bls12381G2.Point.Identity()
	require.True(t, sc.IsIdentity())
	require.Equal(t, sc.ToAffineCompressed(), []byte{0xc0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0})
}
// TestPointBls12381G2Generator checks that Point.Generator matches the
// native library's G2 generator.
func TestPointBls12381G2Generator(t *testing.T) {
	pt, ok := BLS12381G2().Point.Generator().(*PointBls12381G2)
	require.True(t, ok)
	require.Equal(t, 1, pt.Value.Equal(new(bls12381.G2).Generator()))
}
// TestPointBls12381G2Set checks Point.Set: (0, 0) maps to the identity, and
// the generator's uncompressed affine coordinates (96-byte x, 96-byte y) are
// accepted.
func TestPointBls12381G2Set(t *testing.T) {
	curve := BLS12381G2()
	iden, err := curve.Point.Set(big.NewInt(0), big.NewInt(0))
	require.NoError(t, err)
	require.True(t, iden.IsIdentity())

	raw := new(bls12381.G2).Generator().ToUncompressed()
	x := new(big.Int).SetBytes(raw[:96])
	y := new(big.Int).SetBytes(raw[96:])
	_, err = curve.Point.Set(x, y)
	require.NoError(t, err)
}
// TestPointBls12381G2Double checks doubling: 2G == G*2 and doubling the
// identity is a no-op.
func TestPointBls12381G2Double(t *testing.T) {
	curve := BLS12381G2()
	gen := curve.Point.Generator()
	require.True(t, gen.Double().Equal(gen.Mul(curve.Scalar.New(2))))
	iden := curve.Point.Identity()
	require.True(t, iden.Double().Equal(iden))
}
func TestPointBls12381G2Neg(t *testing.T) {
bls12381G2 := BLS12381G1()
g := bls12381G2.Point.Generator().Neg()
require.True(t, g.Neg().Equal(bls12381G2.Point.Generator()))
require.True(t, bls12381G2.Point.Identity().Neg().Equal(bls12381G2.Point.Identity()))
}
// TestPointBls12381G2Add checks addition against doubling and scalar
// multiplication: P+P == 2P and P+P+P == 3P.
func TestPointBls12381G2Add(t *testing.T) {
	curve := BLS12381G2()
	p := curve.Point.Generator()
	require.True(t, p.Add(p).Equal(p.Double()))
	require.True(t, p.Mul(curve.Scalar.New(3)).Equal(p.Add(p).Add(p)))
}
// TestPointBls12381G2Sub checks subtraction: 4G - 3G == G and 4G - 4G is the
// identity.
func TestPointBls12381G2Sub(t *testing.T) {
	curve := BLS12381G2()
	g := curve.Point.Generator()
	fourG := curve.Point.Generator().Mul(curve.Scalar.New(4))
	require.True(t, fourG.Sub(g).Sub(g).Sub(g).Equal(g))
	require.True(t, fourG.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity())
}
// TestPointBls12381G2Mul checks that G*4 equals two successive doublings.
func TestPointBls12381G2Mul(t *testing.T) {
	curve := BLS12381G2()
	gen := curve.Point.Generator()
	fourG := curve.Point.Generator().Mul(curve.Scalar.New(4))
	require.True(t, gen.Double().Double().Equal(fourG))
}
// TestPointBls12381G2Serialize checks G2 point serialization: a fixed point
// (generator * deterministic scalar) must match known compressed (96-byte)
// and uncompressed (192-byte) encodings, and both forms must round-trip.
// Then 25 random points are round-tripped through both encodings.
func TestPointBls12381G2Serialize(t *testing.T) {
	bls12381G2 := BLS12381G2()
	ss := bls12381G2.Scalar.Random(testRng())
	g := bls12381G2.Point.Generator()
	ppt := g.Mul(ss)
	require.Equal(t, ppt.ToAffineCompressed(), []byte{0xad, 0x49, 0x42, 0x28, 0xc6, 0x2c, 0x54, 0xb3, 0xfd, 0xb0, 0xed, 0xd1, 0x8f, 0x10, 0x1f, 0x9a, 0x9a, 0xc5, 0x68, 0x57, 0xff, 0x99, 0x93, 0x6e, 0x8d, 0x79, 0x95, 0xc3, 0xc9, 0xa8, 0xdf, 0x99, 0x63, 0xa2, 0x67, 0xe4, 0xaa, 0x62, 0x9c, 0x33, 0xb0, 0x54, 0x5e, 0xb6, 0xd6, 0x36, 0xa4, 0x0, 0x12, 0x9b, 0x9b, 0x7f, 0x27, 0xce, 0x26, 0x29, 0xf3, 0xa4, 0xf0, 0x8d, 0xfb, 0x48, 0x6d, 0xc7, 0x73, 0xa0, 0x18, 0x84, 0xc7, 0x98, 0xb7, 0xa7, 0xb1, 0x8, 0x88, 0xe9, 0x21, 0xe1, 0xed, 0x61, 0x3c, 0x37, 0xf7, 0xf3, 0xc1, 0x4f, 0x95, 0xfa, 0x64, 0xda, 0x39, 0x32, 0x4, 0x95, 0x87, 0x44})
	require.Equal(t, ppt.ToAffineUncompressed(), []byte{0xd, 0x49, 0x42, 0x28, 0xc6, 0x2c, 0x54, 0xb3, 0xfd, 0xb0, 0xed, 0xd1, 0x8f, 0x10, 0x1f, 0x9a, 0x9a, 0xc5, 0x68, 0x57, 0xff, 0x99, 0x93, 0x6e, 0x8d, 0x79, 0x95, 0xc3, 0xc9, 0xa8, 0xdf, 0x99, 0x63, 0xa2, 0x67, 0xe4, 0xaa, 0x62, 0x9c, 0x33, 0xb0, 0x54, 0x5e, 0xb6, 0xd6, 0x36, 0xa4, 0x0, 0x12, 0x9b, 0x9b, 0x7f, 0x27, 0xce, 0x26, 0x29, 0xf3, 0xa4, 0xf0, 0x8d, 0xfb, 0x48, 0x6d, 0xc7, 0x73, 0xa0, 0x18, 0x84, 0xc7, 0x98, 0xb7, 0xa7, 0xb1, 0x8, 0x88, 0xe9, 0x21, 0xe1, 0xed, 0x61, 0x3c, 0x37, 0xf7, 0xf3, 0xc1, 0x4f, 0x95, 0xfa, 0x64, 0xda, 0x39, 0x32, 0x4, 0x95, 0x87, 0x44, 0x12, 0x37, 0xbe, 0xd8, 0xbf, 0xdb, 0x2a, 0xcc, 0xd, 0x17, 0xcc, 0x6c, 0xfe, 0x7f, 0x49, 0xe4, 0x5d, 0xaf, 0xa3, 0x54, 0xb0, 0xe3, 0xc5, 0x86, 0xff, 0x20, 0x64, 0x30, 0x65, 0xc, 0x7c, 0x7c, 0x2f, 0x80, 0xee, 0x7c, 0x74, 0xd2, 0x8d, 0x2e, 0x92, 0xba, 0x16, 0xe4, 0x13, 0xa6, 0x6, 0xfd, 0x9, 0x3e, 0xd8, 0x59, 0xcc, 0x59, 0x3b, 0xa8, 0x64, 0x44, 0x6b, 0xc, 0xba, 0xd9, 0x9a, 0x2c, 0xed, 0x2d, 0xe4, 0x1e, 0xf1, 0xe6, 0xda, 0x8a, 0xfc, 0x62, 0x1d, 0xf, 0x3b, 0xdf, 0xf6, 0xe5, 0xa3, 0xd1, 0xcd, 0xec, 0x21, 0x73, 0x1f, 0x7d, 0xb2, 0x26, 0x7d, 0x5f, 0xf2, 0xd2, 0xc9, 0x4})
	retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed())
	require.NoError(t, err)
	require.True(t, ppt.Equal(retP))
	retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed())
	require.NoError(t, err)
	require.True(t, ppt.Equal(retP))
	// smoke test
	for i := 0; i < 25; i++ {
		s := bls12381G2.Scalar.Random(crand.Reader)
		pt := g.Mul(s)
		cmprs := pt.ToAffineCompressed()
		require.Equal(t, len(cmprs), 96)
		retC, err := pt.FromAffineCompressed(cmprs)
		require.NoError(t, err)
		require.True(t, pt.Equal(retC))
		un := pt.ToAffineUncompressed()
		require.Equal(t, len(un), 192)
		retU, err := pt.FromAffineUncompressed(un)
		require.NoError(t, err)
		require.True(t, pt.Equal(retU))
	}
}
// TestPointBls12381G2Nil checks that G2 point operations reject nil
// arguments: arithmetic returns nil, Equal is false, and SetBigInt errors.
func TestPointBls12381G2Nil(t *testing.T) {
	curve := BLS12381G2()
	gen := curve.Point.Generator()
	require.Nil(t, gen.Add(nil))
	require.Nil(t, gen.Sub(nil))
	require.Nil(t, gen.Mul(nil))
	require.Nil(t, curve.Scalar.Random(nil))
	require.False(t, gen.Equal(nil))
	_, err := curve.Scalar.SetBigInt(nil)
	require.Error(t, err)
}
// TestPointBls12381G1Random checks Point.Random: with the deterministic test
// RNG it must produce a fixed, known G1 point; with a real RNG, 10 sampled
// points must all be valid non-identity G1 points.
func TestPointBls12381G1Random(t *testing.T) {
	bls12381G1 := BLS12381G1()
	sc := bls12381G1.Point.Random(testRng())
	s, ok := sc.(*PointBls12381G1)
	require.True(t, ok)
	// Expected affine coordinates for the point derived from testRng's fixed stream.
	expectedX, _ := new(big.Int).SetString("191b78617711a9aca6092c50d8c715db4856b84e48b9aa07dc42719335751b2ef3dfa2f6f15afc6dba2d0fb3be63dd83", 16)
	expectedY, _ := new(big.Int).SetString("0d7053b5d9b5f23839a0dc4ad18bb55bd6ac20e1e53750c1140e434c61f87033e6338f10955b690eee0efc383d6e6d25", 16)
	require.Equal(t, s.X(), expectedX)
	require.Equal(t, s.Y(), expectedY)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc := bls12381G1.Point.Random(crand.Reader)
		_, ok := sc.(*PointBls12381G1)
		require.True(t, ok)
		require.True(t, !sc.IsIdentity())
	}
}
// TestPointBls12381G1Hash checks that hashing a fixed 32-byte zero input to
// G1 yields the expected, known point (hash-to-curve must be deterministic).
func TestPointBls12381G1Hash(t *testing.T) {
	var b [32]byte
	bls12381G1 := BLS12381G1()
	sc := bls12381G1.Point.Hash(b[:])
	s, ok := sc.(*PointBls12381G1)
	require.True(t, ok)
	expectedX, _ := new(big.Int).SetString("1239150a658a8b04d56f3d14593bb3fa6f791ee221224480b5170da43a4c3602f97be83649c31b2738a606b89c2e9fea", 16)
	expectedY, _ := new(big.Int).SetString("124af4bc2008ed9be7db7137f8b41e4b65f37cfd34938c4466531dc7ed657e66ff6c6c6912488d9285e0645c6ba62b92", 16)
	require.Equal(t, s.X(), expectedX)
	require.Equal(t, s.Y(), expectedY)
}
// TestPointBls12381G1Identity checks the G1 identity point and its
// compressed encoding: 48 bytes with only the infinity+compression flag
// bits set in the first byte (0xc0).
func TestPointBls12381G1Identity(t *testing.T) {
	bls12381G1 := BLS12381G1()
	sc := bls12381G1.Point.Identity()
	require.True(t, sc.IsIdentity())
	require.Equal(t, sc.ToAffineCompressed(), []byte{0xc0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
}
// TestPointBls12381G1Generator checks that Point.Generator matches the
// native library's G1 generator.
func TestPointBls12381G1Generator(t *testing.T) {
	pt, ok := BLS12381G1().Point.Generator().(*PointBls12381G1)
	require.True(t, ok)
	require.Equal(t, 1, pt.Value.Equal(new(bls12381.G1).Generator()))
}
// TestPointBls12381G1Set checks Point.Set: (0, 0) maps to the identity, and
// the generator's uncompressed affine coordinates (48-byte x, 48-byte y) are
// accepted.
func TestPointBls12381G1Set(t *testing.T) {
	curve := BLS12381G1()
	iden, err := curve.Point.Set(big.NewInt(0), big.NewInt(0))
	require.NoError(t, err)
	require.True(t, iden.IsIdentity())

	raw := new(bls12381.G1).Generator().ToUncompressed()
	x := new(big.Int).SetBytes(raw[:48])
	y := new(big.Int).SetBytes(raw[48:])
	_, err = curve.Point.Set(x, y)
	require.NoError(t, err)
}
// TestPointBls12381G1Double checks doubling: 2G == G*2 and doubling the
// identity is a no-op.
func TestPointBls12381G1Double(t *testing.T) {
	curve := BLS12381G1()
	gen := curve.Point.Generator()
	require.True(t, gen.Double().Equal(gen.Mul(curve.Scalar.New(2))))
	iden := curve.Point.Identity()
	require.True(t, iden.Double().Equal(iden))
}
// TestPointBls12381G1Neg exercises point negation in G1: double negation
// returns the original generator, and negating the identity yields the
// identity.
func TestPointBls12381G1Neg(t *testing.T) {
	curve := BLS12381G1()
	negG := curve.Point.Generator().Neg()
	require.True(t, negG.Neg().Equal(curve.Point.Generator()))
	require.True(t, curve.Point.Identity().Neg().Equal(curve.Point.Identity()))
}
// TestPointBls12381G1Add checks addition against doubling and scalar
// multiplication: P+P == 2P and P+P+P == 3P.
func TestPointBls12381G1Add(t *testing.T) {
	curve := BLS12381G1()
	p := curve.Point.Generator()
	require.True(t, p.Add(p).Equal(p.Double()))
	require.True(t, p.Mul(curve.Scalar.New(3)).Equal(p.Add(p).Add(p)))
}
// TestPointBls12381G1Sub checks subtraction: 4G - 3G == G and 4G - 4G is the
// identity.
func TestPointBls12381G1Sub(t *testing.T) {
	curve := BLS12381G1()
	g := curve.Point.Generator()
	fourG := curve.Point.Generator().Mul(curve.Scalar.New(4))
	require.True(t, fourG.Sub(g).Sub(g).Sub(g).Equal(g))
	require.True(t, fourG.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity())
}
// TestPointBls12381G1Mul checks that G*4 equals two successive doublings.
func TestPointBls12381G1Mul(t *testing.T) {
	curve := BLS12381G1()
	gen := curve.Point.Generator()
	fourG := curve.Point.Generator().Mul(curve.Scalar.New(4))
	require.True(t, gen.Double().Double().Equal(fourG))
}
// TestPointBls12381G1Serialize checks G1 point serialization: a fixed point
// (generator * deterministic scalar) must match known compressed (48-byte)
// and uncompressed (96-byte) encodings, and both forms must round-trip.
// Then 25 random points are round-tripped through both encodings.
func TestPointBls12381G1Serialize(t *testing.T) {
	bls12381G1 := BLS12381G1()
	ss := bls12381G1.Scalar.Random(testRng())
	g := bls12381G1.Point.Generator()
	ppt := g.Mul(ss)
	require.Equal(t, ppt.ToAffineCompressed(), []byte{0xa8, 0x6d, 0xac, 0x3a, 0xd8, 0x6f, 0x6b, 0x1f, 0x6b, 0x47, 0x7f, 0x22, 0x73, 0xa9, 0x5a, 0x5f, 0x4c, 0xff, 0x1a, 0xf7, 0x27, 0xab, 0x73, 0x51, 0xfe, 0xa5, 0xfd, 0x9d, 0x21, 0xcd, 0xaa, 0x40, 0x7f, 0xf9, 0x5, 0xca, 0x2f, 0x9a, 0xdb, 0x5d, 0x5b, 0x6a, 0x86, 0xb3, 0x84, 0xc6, 0xc, 0x37})
	require.Equal(t, ppt.ToAffineUncompressed(), []byte{0x8, 0x6d, 0xac, 0x3a, 0xd8, 0x6f, 0x6b, 0x1f, 0x6b, 0x47, 0x7f, 0x22, 0x73, 0xa9, 0x5a, 0x5f, 0x4c, 0xff, 0x1a, 0xf7, 0x27, 0xab, 0x73, 0x51, 0xfe, 0xa5, 0xfd, 0x9d, 0x21, 0xcd, 0xaa, 0x40, 0x7f, 0xf9, 0x5, 0xca, 0x2f, 0x9a, 0xdb, 0x5d, 0x5b, 0x6a, 0x86, 0xb3, 0x84, 0xc6, 0xc, 0x37, 0x10, 0x5f, 0x99, 0x9a, 0x58, 0x93, 0x4, 0x35, 0x76, 0x91, 0x7c, 0x8e, 0x6a, 0xcb, 0x3c, 0xad, 0xdb, 0x84, 0x3, 0xd9, 0x24, 0xec, 0xa2, 0xa8, 0x4e, 0x99, 0x4f, 0xbb, 0x77, 0x3a, 0x3f, 0x9a, 0xd, 0x64, 0x9d, 0x76, 0xe, 0x61, 0xfb, 0x60, 0x36, 0x55, 0x91, 0x5c, 0x49, 0x20, 0x43, 0x29})
	retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed())
	require.NoError(t, err)
	require.True(t, ppt.Equal(retP))
	retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed())
	require.NoError(t, err)
	require.True(t, ppt.Equal(retP))
	// smoke test
	for i := 0; i < 25; i++ {
		s := bls12381G1.Scalar.Random(crand.Reader)
		pt := g.Mul(s)
		cmprs := pt.ToAffineCompressed()
		require.Equal(t, len(cmprs), 48)
		retC, err := pt.FromAffineCompressed(cmprs)
		require.NoError(t, err)
		require.True(t, pt.Equal(retC))
		un := pt.ToAffineUncompressed()
		require.Equal(t, len(un), 96)
		retU, err := pt.FromAffineUncompressed(un)
		require.NoError(t, err)
		require.True(t, pt.Equal(retU))
	}
}
// TestPointBls12381G1Nil checks that G1 point operations reject nil
// arguments: arithmetic returns nil, Equal is false, and SetBigInt errors.
func TestPointBls12381G1Nil(t *testing.T) {
	curve := BLS12381G1()
	gen := curve.Point.Generator()
	require.Nil(t, gen.Add(nil))
	require.Nil(t, gen.Sub(nil))
	require.Nil(t, gen.Mul(nil))
	require.Nil(t, curve.Scalar.Random(nil))
	require.False(t, gen.Equal(nil))
	_, err := curve.Scalar.SetBigInt(nil)
	require.Error(t, err)
}
// TestPointBls12381G1SumOfProducts checks SumOfProducts against the
// equivalent single multiplication: G*8 + G*9 + G*10 + G*11 + G*12 == G*50.
func TestPointBls12381G1SumOfProducts(t *testing.T) {
	lhs := new(PointBls12381G1).Generator().Mul(new(ScalarBls12381).New(50))
	points := make([]Point, 5)
	scalars := make([]Scalar, 5)
	for i := 0; i < 5; i++ {
		points[i] = new(PointBls12381G1).Generator()
		scalars[i] = new(ScalarBls12381).New(8 + i)
	}
	rhs := lhs.SumOfProducts(points, scalars)
	require.NotNil(t, rhs)
	require.True(t, lhs.Equal(rhs))
}

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,944 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
// Copyright Quilibrium, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/elliptic"
"encoding/hex"
"encoding/json"
"fmt"
"hash"
"io"
"math/big"
"sync"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves/native/bls12381"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves/native/bls48581"
)
// Lazily-initialized singletons for each supported curve. They are populated
// exactly once via the matching sync.Once and must only be accessed through
// the corresponding constructor functions (K256(), BLS12381G1(), ...).
var (
	k256Initonce sync.Once
	k256         Curve

	bls12381g1Initonce sync.Once
	bls12381g1         Curve

	bls12381g2Initonce sync.Once
	bls12381g2         Curve

	bls48581g1Initonce sync.Once
	bls48581g1         Curve

	bls48581g2Initonce sync.Once
	bls48581g2         Curve

	bls12377g1Initonce sync.Once
	bls12377g1         Curve

	bls12377g2Initonce sync.Once
	bls12377g2         Curve

	p256Initonce sync.Once
	p256         Curve

	ed25519Initonce sync.Once
	ed25519         Curve

	ed448Initonce sync.Once
	ed448         Curve

	pallasInitonce sync.Once
	pallas         Curve
)

// Canonical curve names used for lookup (GetCurveByName) and serialization.
// NOTE(review): "BLS12831" appears to be a long-standing transposition of
// "BLS12381" kept for wire compatibility — confirm before renaming.
const (
	K256Name       = "secp256k1"
	BLS12381G1Name = "BLS12381G1"
	BLS12381G2Name = "BLS12381G2"
	BLS12831Name   = "BLS12831"
	BLS48581G1Name = "BLS48581G1"
	BLS48581G2Name = "BLS48581G2"
	BLS48581Name   = "BLS48581"
	P256Name       = "P-256"
	ED25519Name    = "ed25519"
	ED448Name      = "ed448"
	PallasName     = "pallas"
	BLS12377G1Name = "BLS12377G1"
	BLS12377G2Name = "BLS12377G2"
	BLS12377Name   = "BLS12377"
)

// scalarBytes is the fixed scalar payload size assumed by the (un)marshal
// helpers in this file.
const scalarBytes = 32
// Scalar represents an element of the scalar field \mathbb{F}_q
// of the elliptic curve construction. Implementations exist per curve;
// operations on nil or foreign-field operands return nil (see Cmp).
type Scalar interface {
	// Random returns a random scalar using the provided reader
	// to retrieve bytes
	Random(reader io.Reader) Scalar
	// Hash the specific bytes in a manner to yield a
	// uniformly distributed scalar
	Hash(bytes []byte) Scalar
	// Zero returns the additive identity element
	Zero() Scalar
	// One returns the multiplicative identity element
	One() Scalar
	// IsZero returns true if this element is the additive identity element
	IsZero() bool
	// IsOne returns true if this element is the multiplicative identity element
	IsOne() bool
	// IsOdd returns true if this element is odd
	IsOdd() bool
	// IsEven returns true if this element is even
	IsEven() bool
	// New returns an element with the value equal to `value`
	New(value int) Scalar
	// Cmp returns
	// -2 if this element is in a different field than rhs
	// -1 if this element is less than rhs
	// 0 if this element is equal to rhs
	// 1 if this element is greater than rhs
	Cmp(rhs Scalar) int
	// Square returns element*element
	Square() Scalar
	// Double returns element+element
	Double() Scalar
	// Invert returns element^-1 mod p
	Invert() (Scalar, error)
	// Sqrt computes the square root of this element if it exists.
	Sqrt() (Scalar, error)
	// Cube returns element*element*element
	Cube() Scalar
	// Add returns element+rhs
	Add(rhs Scalar) Scalar
	// Sub returns element-rhs
	Sub(rhs Scalar) Scalar
	// Mul returns element*rhs
	Mul(rhs Scalar) Scalar
	// MulAdd returns element * y + z mod p
	MulAdd(y, z Scalar) Scalar
	// Div returns element*rhs^-1 mod p
	Div(rhs Scalar) Scalar
	// Neg returns -element mod p
	Neg() Scalar
	// SetBigInt returns this element set to the value of v
	SetBigInt(v *big.Int) (Scalar, error)
	// BigInt returns this element as a big integer
	BigInt() *big.Int
	// Point returns the associated point for this scalar
	Point() Point
	// Bytes returns the canonical byte representation of this scalar
	Bytes() []byte
	// SetBytes creates a scalar from the canonical representation expecting the exact number of bytes needed to represent the scalar
	SetBytes(bytes []byte) (Scalar, error)
	// SetBytesWide creates a scalar expecting double the exact number of bytes needed to represent the scalar which is reduced by the modulus
	SetBytesWide(bytes []byte) (Scalar, error)
	// Clone returns a cloned Scalar of this value
	Clone() Scalar
}
// PairingScalar is a Scalar that can additionally be bound to a specific
// pairing group point via SetPoint.
type PairingScalar interface {
	Scalar
	SetPoint(p Point) PairingScalar
}
// unmarshalScalar splits a serialized scalar of the form "<curve name>:<payload>"
// into the named curve and the raw payload bytes following the first ':'.
func unmarshalScalar(input []byte) (*Curve, []byte, error) {
	sep := byte(':')
	idx := 0
	for idx < len(input) && input[idx] != sep {
		idx++
	}
	curve := GetCurveByName(string(input[:idx]))
	if curve == nil {
		return nil, nil, fmt.Errorf("unrecognized curve")
	}
	return curve, input[idx+1:], nil
}
// scalarMarshalBinary encodes a scalar as "<curve name>:" followed by
// exactly scalarBytes of the scalar's canonical byte representation.
func scalarMarshalBinary(scalar Scalar) ([]byte, error) {
	name := scalar.Point().CurveName()
	buf := make([]byte, len(name)+1+scalarBytes)
	n := copy(buf, name)
	buf[n] = ':'
	copy(buf[n+1:], scalar.Bytes())
	return buf, nil
}
// scalarUnmarshalBinary decodes the "<curve name>:<raw bytes>" form produced
// by scalarMarshalBinary.
func scalarUnmarshalBinary(input []byte) (Scalar, error) {
	// The shortest valid encoding carries the shortest curve name (P-256).
	if len(input) < scalarBytes+1+len(P256Name) {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	curve, data, err := unmarshalScalar(input)
	if err != nil {
		return nil, err
	}
	return curve.Scalar.SetBytes(data)
}
// scalarMarshalText encodes a scalar as "<curve name>:" followed by the
// hex encoding of its canonical bytes (hex is used for readability and to
// avoid base64 strict/lenient ambiguity).
func scalarMarshalText(scalar Scalar) ([]byte, error) {
	name := scalar.Point().CurveName()
	buf := make([]byte, len(name)+1+scalarBytes*2)
	n := copy(buf, name)
	buf[n] = ':'
	hex.Encode(buf[n+1:], scalar.Bytes())
	return buf, nil
}
// scalarUnmarshalText decodes the "<curve name>:<hex>" form produced by
// scalarMarshalText.
func scalarUnmarshalText(input []byte) (Scalar, error) {
	if len(input) < scalarBytes*2+len(P256Name)+1 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	curve, data, err := unmarshalScalar(input)
	if err != nil {
		return nil, err
	}
	var raw [scalarBytes]byte
	if _, err = hex.Decode(raw[:], data); err != nil {
		return nil, err
	}
	return curve.Scalar.SetBytes(raw[:])
}
// scalarMarshalJson encodes a scalar as {"type": <curve name>, "value": <hex>}.
func scalarMarshalJson(scalar Scalar) ([]byte, error) {
	return json.Marshal(map[string]string{
		"type":  scalar.Point().CurveName(),
		"value": hex.EncodeToString(scalar.Bytes()),
	})
}
// scalarUnmarshalJson decodes the {"type", "value"} object produced by
// scalarMarshalJson.
func scalarUnmarshalJson(input []byte) (Scalar, error) {
	var fields map[string]string
	if err := json.Unmarshal(input, &fields); err != nil {
		return nil, err
	}
	curve := GetCurveByName(fields["type"])
	if curve == nil {
		return nil, fmt.Errorf("invalid type")
	}
	raw, err := hex.DecodeString(fields["value"])
	if err != nil {
		return nil, err
	}
	return curve.Scalar.SetBytes(raw)
}
// Point represents an elliptic curve point
type Point interface {
	// Random returns a random point using the provided reader to retrieve bytes.
	Random(reader io.Reader) Point
	// Hash maps the given bytes to a point on the curve.
	Hash(bytes []byte) Point
	// Identity returns the group's additive identity element.
	Identity() Point
	// Generator returns the group's base point.
	Generator() Point
	IsIdentity() bool
	IsNegative() bool
	IsOnCurve() bool
	// Double returns point+point.
	Double() Point
	// Scalar returns a scalar of the associated scalar field.
	Scalar() Scalar
	// Neg returns the additive inverse of this point.
	Neg() Point
	Add(rhs Point) Point
	Sub(rhs Point) Point
	// Mul returns the scalar multiplication point * rhs.
	Mul(rhs Scalar) Point
	Equal(rhs Point) bool
	// Set constructs a point from affine x, y coordinates.
	Set(x, y *big.Int) (Point, error)
	ToAffineCompressed() []byte
	ToAffineUncompressed() []byte
	FromAffineCompressed(bytes []byte) (Point, error)
	FromAffineUncompressed(bytes []byte) (Point, error)
	CurveName() string
	// SumOfProducts computes the multi-scalar product Σ points[i] * scalars[i].
	SumOfProducts(points []Point, scalars []Scalar) Point
}
// PairingPoint is a Point on a pairing-friendly curve: it exposes the
// companion group and can be paired with a point from it.
type PairingPoint interface {
	Point
	OtherGroup() PairingPoint
	Pairing(rhs PairingPoint) Scalar
	MultiPairing(...PairingPoint) Scalar
}
// pointMarshalBinary encodes a point as "<curve name>:<compressed bytes>".
// Points are always serialized in compressed form.
func pointMarshalBinary(point Point) ([]byte, error) {
	compressed := point.ToAffineCompressed()
	name := point.CurveName()
	buf := make([]byte, len(name)+1+len(compressed))
	n := copy(buf, name)
	buf[n] = ':'
	copy(buf[n+1:], compressed)
	return buf, nil
}
// pointUnmarshalBinary decodes the "<curve name>:<compressed bytes>" form
// produced by pointMarshalBinary.
func pointUnmarshalBinary(input []byte) (Point, error) {
	if len(input) < scalarBytes+1+len(P256Name) {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	sep := byte(':')
	idx := 0
	for idx < len(input) && input[idx] != sep {
		idx++
	}
	curve := GetCurveByName(string(input[:idx]))
	if curve == nil {
		return nil, fmt.Errorf("unrecognized curve")
	}
	return curve.Point.FromAffineCompressed(input[idx+1:])
}
// pointMarshalText encodes a point as "<curve name>:" followed by the hex
// encoding of its compressed form.
func pointMarshalText(point Point) ([]byte, error) {
	compressed := point.ToAffineCompressed()
	name := point.CurveName()
	buf := make([]byte, len(name)+1+len(compressed)*2)
	n := copy(buf, name)
	buf[n] = ':'
	hex.Encode(buf[n+1:], compressed)
	return buf, nil
}
// pointUnmarshalText decodes the "<curve name>:<hex compressed>" form
// produced by pointMarshalText.
func pointUnmarshalText(input []byte) (Point, error) {
	if len(input) < scalarBytes*2+1+len(P256Name) {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	sep := byte(':')
	idx := 0
	for idx < len(input) && input[idx] != sep {
		idx++
	}
	curve := GetCurveByName(string(input[:idx]))
	if curve == nil {
		return nil, fmt.Errorf("unrecognized curve")
	}
	decoded := make([]byte, (len(input)-idx)/2)
	if _, err := hex.Decode(decoded, input[idx+1:]); err != nil {
		return nil, err
	}
	return curve.Point.FromAffineCompressed(decoded)
}
// pointMarshalJson encodes a point as {"type": <curve name>, "value": <hex>}.
func pointMarshalJson(point Point) ([]byte, error) {
	return json.Marshal(map[string]string{
		"type":  point.CurveName(),
		"value": hex.EncodeToString(point.ToAffineCompressed()),
	})
}
// pointUnmarshalJson decodes the {"type", "value"} object produced by
// pointMarshalJson.
func pointUnmarshalJson(input []byte) (Point, error) {
	var fields map[string]string
	if err := json.Unmarshal(input, &fields); err != nil {
		return nil, err
	}
	curve := GetCurveByName(fields["type"])
	if curve == nil {
		return nil, fmt.Errorf("invalid type")
	}
	raw, err := hex.DecodeString(fields["value"])
	if err != nil {
		return nil, err
	}
	return curve.Point.FromAffineCompressed(raw)
}
// Curve represents a named elliptic curve with a scalar field and point group
type Curve struct {
	Scalar Scalar // prototype scalar; use its methods (Zero, New, ...) to create values
	Point  Point  // prototype point; use its methods (Identity, Generator, ...) to create values
	Name   string // canonical curve name (see the *Name constants)
}
// ScalarBaseMult multiplies the curve's generator point by sc.
func (c Curve) ScalarBaseMult(sc Scalar) Point {
	return c.Point.Generator().Mul(sc)
}

// NewGeneratorPoint returns the curve's generator point.
func (c Curve) NewGeneratorPoint() Point {
	return c.Point.Generator()
}

// NewIdentityPoint returns the curve's identity point.
func (c Curve) NewIdentityPoint() Point {
	return c.Point.Identity()
}

// NewScalar returns the zero scalar of the curve's scalar field.
func (c Curve) NewScalar() Scalar {
	return c.Scalar.Zero()
}
// ToEllipticCurve returns the equivalent of this curve as the go interface
// `elliptic.Curve`. Only the short-Weierstrass curves with a native Go
// representation (secp256k1 and P-256) can be converted; every other curve
// name yields an error.
func (c Curve) ToEllipticCurve() (elliptic.Curve, error) {
	switch c.Name {
	case K256Name:
		return K256Curve(), nil
	case P256Name:
		return NistP256Curve(), nil
	default:
		return nil, fmt.Errorf("can't convert %s", c.Name)
	}
}
// PairingCurve represents a named elliptic curve
// that supports pairings
type PairingCurve struct {
	Scalar  PairingScalar // prototype scalar for the shared scalar field
	PointG1 PairingPoint  // prototype point in G1
	PointG2 PairingPoint  // prototype point in G2
	GT      Scalar        // prototype element of the pairing target group
	Name    string        // canonical pairing curve name
}
// ScalarG1BaseMult multiplies the G1 generator by sc.
func (c PairingCurve) ScalarG1BaseMult(sc Scalar) PairingPoint {
	return c.PointG1.Generator().Mul(sc).(PairingPoint)
}

// ScalarG2BaseMult multiplies the G2 generator by sc.
func (c PairingCurve) ScalarG2BaseMult(sc Scalar) PairingPoint {
	return c.PointG2.Generator().Mul(sc).(PairingPoint)
}

// NewG1GeneratorPoint returns the G1 generator.
func (c PairingCurve) NewG1GeneratorPoint() PairingPoint {
	return c.PointG1.Generator().(PairingPoint)
}

// NewG2GeneratorPoint returns the G2 generator.
func (c PairingCurve) NewG2GeneratorPoint() PairingPoint {
	return c.PointG2.Generator().(PairingPoint)
}

// NewG1IdentityPoint returns the G1 identity point.
func (c PairingCurve) NewG1IdentityPoint() PairingPoint {
	return c.PointG1.Identity().(PairingPoint)
}

// NewG2IdentityPoint returns the G2 identity point.
func (c PairingCurve) NewG2IdentityPoint() PairingPoint {
	return c.PointG2.Identity().(PairingPoint)
}

// NewScalar returns the zero scalar of the pairing curve's scalar field.
func (c PairingCurve) NewScalar() PairingScalar {
	return c.Scalar.Zero().(PairingScalar)
}
// GetCurveByName returns the correct `Curve` given the name, or nil when the
// name is not recognized. The pairing aliases (BLS12831Name, BLS48581Name,
// BLS12377Name) resolve to their respective G1 curves.
//
// Fixed: the BLS48-581 names were declared and the curves constructed in
// this file but were missing here, so they could never be resolved by name
// (and hence never deserialized), unlike every other curve family.
func GetCurveByName(name string) *Curve {
	switch name {
	case K256Name:
		return K256()
	case BLS12381G1Name:
		return BLS12381G1()
	case BLS12381G2Name:
		return BLS12381G2()
	case BLS12831Name:
		return BLS12381G1()
	case BLS48581G1Name:
		return BLS48581G1()
	case BLS48581G2Name:
		return BLS48581G2()
	case BLS48581Name:
		return BLS48581G1()
	case P256Name:
		return P256()
	case ED25519Name:
		return ED25519()
	case ED448Name:
		return ED448()
	case PallasName:
		return PALLAS()
	case BLS12377G1Name:
		return BLS12377G1()
	case BLS12377G2Name:
		return BLS12377G2()
	case BLS12377Name:
		return BLS12377G1()
	default:
		return nil
	}
}
// GetPairingCurveByName returns the pairing-enabled form of the named curve,
// with the preferred point group chosen to match the name, or nil when the
// name is not a known pairing curve. Only BLS12-381 names are mapped here.
func GetPairingCurveByName(name string) *PairingCurve {
	switch name {
	case BLS12381G1Name:
		return BLS12381(BLS12381G1().NewIdentityPoint())
	case BLS12381G2Name:
		return BLS12381(BLS12381G2().NewIdentityPoint())
	case BLS12831Name:
		return BLS12381(BLS12381G1().NewIdentityPoint())
	default:
		return nil
	}
}
// BLS12381G1 returns the BLS12-381 curve with points in G1
// (lazily initialized singleton).
func BLS12381G1() *Curve {
	bls12381g1Initonce.Do(bls12381g1Init)
	return &bls12381g1
}

// bls12381g1Init constructs the BLS12-381 G1 curve singleton.
func bls12381g1Init() {
	bls12381g1 = Curve{
		Scalar: &ScalarBls12381{
			Value: bls12381.Bls12381FqNew(),
			point: new(PointBls12381G1),
		},
		Point: new(PointBls12381G1).Identity(),
		Name:  BLS12381G1Name,
	}
}
// BLS12381G2 returns the BLS12-381 curve with points in G2
// (lazily initialized singleton).
func BLS12381G2() *Curve {
	bls12381g2Initonce.Do(bls12381g2Init)
	return &bls12381g2
}

// bls12381g2Init constructs the BLS12-381 G2 curve singleton.
func bls12381g2Init() {
	bls12381g2 = Curve{
		Scalar: &ScalarBls12381{
			Value: bls12381.Bls12381FqNew(),
			point: new(PointBls12381G2),
		},
		Point: new(PointBls12381G2).Identity(),
		Name:  BLS12381G2Name,
	}
}
// BLS12381 returns the BLS12-381 pairing curve. preferredPoint selects the
// point group (G1 or G2) that scalars produced by this curve are bound to.
func BLS12381(preferredPoint Point) *PairingCurve {
	return &PairingCurve{
		Scalar: &ScalarBls12381{
			Value: bls12381.Bls12381FqNew(),
			point: preferredPoint,
		},
		PointG1: &PointBls12381G1{
			Value: new(bls12381.G1).Identity(),
		},
		PointG2: &PointBls12381G2{
			Value: new(bls12381.G2).Identity(),
		},
		GT: &ScalarBls12381Gt{
			Value: new(bls12381.Gt).SetOne(),
		},
		Name: BLS12831Name,
	}
}
// BLS48581G1 returns the BLS48-581 curve with points in G1
// (lazily initialized singleton).
func BLS48581G1() *Curve {
	bls48581g1Initonce.Do(bls48581g1Init)
	return &bls48581g1
}
// bls48581g1Init constructs the BLS48-581 G1 curve singleton.
func bls48581g1Init() {
	bls48581g1 = Curve{
		Scalar: &ScalarBls48581{
			Value: bls48581.NewBIGint(1),
			point: new(PointBls48581G1),
		},
		Point: new(PointBls48581G1).Identity(),
		// Fixed: this previously registered the curve as BLS12381G1Name, a
		// copy-paste error that mislabeled BLS48-581 G1 as BLS12-381 G1
		// (compare bls48581g2Init, which correctly uses BLS48581G2Name).
		Name: BLS48581G1Name,
	}
}
// BLS48581G2 returns the BLS48-581 curve with points in G2
// (lazily initialized singleton).
func BLS48581G2() *Curve {
	bls48581g2Initonce.Do(bls48581g2Init)
	return &bls48581g2
}

// bls48581g2Init constructs the BLS48-581 G2 curve singleton.
func bls48581g2Init() {
	bls48581g2 = Curve{
		Scalar: &ScalarBls48581{
			Value: bls48581.NewBIGint(1),
			point: new(PointBls48581G2),
		},
		Point: new(PointBls48581G2).Identity(),
		Name:  BLS48581G2Name,
	}
}
// BLS48581 returns the BLS48-581 pairing curve. preferredPoint selects the
// point group (G1 or G2) that scalars produced by this curve are bound to.
func BLS48581(preferredPoint Point) *PairingCurve {
	return &PairingCurve{
		Scalar: &ScalarBls48581{
			Value: bls48581.NewBIG(),
			point: preferredPoint,
		},
		PointG1: &PointBls48581G1{
			Value: bls48581.ECP_generator(),
		},
		PointG2: &PointBls48581G2{
			Value: bls48581.ECP8_generator(),
		},
		GT: &ScalarBls48581Gt{
			Value: bls48581.NewFP48int(1),
		},
		Name: BLS48581Name,
	}
}
// BLS12377G1 returns the BLS12-377 curve with points in G1
// (lazily initialized singleton).
func BLS12377G1() *Curve {
	bls12377g1Initonce.Do(bls12377g1Init)
	return &bls12377g1
}

// bls12377g1Init constructs the BLS12-377 G1 curve singleton.
func bls12377g1Init() {
	bls12377g1 = Curve{
		Scalar: &ScalarBls12377{
			value: new(big.Int),
			point: new(PointBls12377G1),
		},
		Point: new(PointBls12377G1).Identity(),
		Name:  BLS12377G1Name,
	}
}

// BLS12377G2 returns the BLS12-377 curve with points in G2
// (lazily initialized singleton).
func BLS12377G2() *Curve {
	bls12377g2Initonce.Do(bls12377g2Init)
	return &bls12377g2
}

// bls12377g2Init constructs the BLS12-377 G2 curve singleton.
func bls12377g2Init() {
	bls12377g2 = Curve{
		Scalar: &ScalarBls12377{
			value: new(big.Int),
			point: new(PointBls12377G2),
		},
		Point: new(PointBls12377G2).Identity(),
		Name:  BLS12377G2Name,
	}
}
// K256 returns the secp256k1 curve
// (lazily initialized singleton).
func K256() *Curve {
	k256Initonce.Do(k256Init)
	return &k256
}

// k256Init constructs the secp256k1 curve singleton.
func k256Init() {
	k256 = Curve{
		Scalar: new(ScalarK256).Zero(),
		Point:  new(PointK256).Identity(),
		Name:   K256Name,
	}
}

// P256 returns the NIST P-256 curve (lazily initialized singleton).
func P256() *Curve {
	p256Initonce.Do(p256Init)
	return &p256
}

// p256Init constructs the P-256 curve singleton.
func p256Init() {
	p256 = Curve{
		Scalar: new(ScalarP256).Zero(),
		Point:  new(PointP256).Identity(),
		Name:   P256Name,
	}
}

// ED25519 returns the ed25519 curve (lazily initialized singleton).
func ED25519() *Curve {
	ed25519Initonce.Do(ed25519Init)
	return &ed25519
}

// ed25519Init constructs the ed25519 curve singleton.
func ed25519Init() {
	ed25519 = Curve{
		Scalar: new(ScalarEd25519).Zero(),
		Point:  new(PointEd25519).Identity(),
		Name:   ED25519Name,
	}
}

// ED448 returns the ed448 curve (lazily initialized singleton).
func ED448() *Curve {
	ed448Initonce.Do(ed448Init)
	return &ed448
}

// ed448Init constructs the ed448 curve singleton.
func ed448Init() {
	ed448 = Curve{
		Scalar: new(ScalarEd448).Zero(),
		Point:  new(PointEd448).Identity(),
		Name:   ED448Name,
	}
}

// PALLAS returns the Pallas curve (lazily initialized singleton).
func PALLAS() *Curve {
	pallasInitonce.Do(pallasInit)
	return &pallas
}

// pallasInit constructs the Pallas curve singleton.
func pallasInit() {
	pallas = Curve{
		Scalar: new(ScalarPallas).Zero(),
		Point:  new(PointPallas).Identity(),
		Name:   PallasName,
	}
}
// https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-11#appendix-G.2.1
//
// osswu3mod4 maps a field element u to affine coordinates (x, y) of a point
// on the short-Weierstrass curve described by p, using the straight-line
// simplified SWU method for p ≡ 3 (mod 4). The sign of y is adjusted at the
// end to match the parity of u.
func osswu3mod4(u *big.Int, p *sswuParams) (x, y *big.Int) {
	params := p.Params
	field := NewField(p.Params.P)
	tv1 := field.NewElement(u)
	tv1 = tv1.Mul(tv1)                   // tv1 = u^2
	tv3 := field.NewElement(p.Z).Mul(tv1) // tv3 = Z * tv1
	tv2 := tv3.Mul(tv3)                  // tv2 = tv3^2
	xd := tv2.Add(tv3)                   // xd = tv2 + tv3
	x1n := xd.Add(field.One())           // x1n = (xd + 1)
	x1n = x1n.Mul(field.NewElement(p.B)) // x1n * B
	aNeg := field.NewElement(p.A).Neg()
	xd = xd.Mul(aNeg) // xd = -A * xd
	// Exceptional case of the SWU map: replace a zero denominator with Z * A.
	if xd.Value.Cmp(big.NewInt(0)) == 0 {
		xd = field.NewElement(p.Z).Mul(field.NewElement(p.A)) // xd = Z * A
	}
	tv2 = xd.Mul(xd)                     // tv2 = xd^2
	gxd := tv2.Mul(xd)                   // gxd = tv2 * xd
	tv2 = tv2.Mul(field.NewElement(p.A)) // tv2 = A * tv2
	gx1 := x1n.Mul(x1n)                  // gx1 = x1n^2
	gx1 = gx1.Add(tv2)                   // gx1 = gx1 + tv2
	gx1 = gx1.Mul(x1n)                   // gx1 = gx1 * x1n
	tv2 = gxd.Mul(field.NewElement(p.B)) // tv2 = B * gxd
	gx1 = gx1.Add(tv2)                   // gx1 = gx1 + tv2
	tv4 := gxd.Mul(gxd)                  // tv4 = gxd^2
	tv2 = gx1.Mul(gxd)                   // tv2 = gx1 * gxd
	tv4 = tv4.Mul(tv2)                   //tv4 = tv4 * tv2
	// Candidate square root via exponentiation by the precomputed constant C1.
	y1 := tv4.Pow(field.NewElement(p.C1))
	y1 = y1.Mul(tv2)                     //y1 = y1 * tv2
	x2n := tv3.Mul(x1n)                  // x2n = tv3 * x1n
	y2 := y1.Mul(field.NewElement(p.C2)) // y2 = y1 * c2
	y2 = y2.Mul(tv1)                     // y2 = y2 * tv1
	y2 = y2.Mul(field.NewElement(u))     // y2 = y2 * u
	tv2 = y1.Mul(y1)                     // tv2 = y1^2
	tv2 = tv2.Mul(gxd)                   // tv2 = tv2 * gxd
	// e2 is true when gx1 is square, i.e. the first candidate (x1, y1) is on the curve.
	e2 := tv2.Value.Cmp(gx1.Value) == 0
	// If e2, x = x1, else x = x2
	if e2 {
		x = x1n.Value
	} else {
		x = x2n.Value
	}
	// xn / xd
	x.Mul(x, new(big.Int).ModInverse(xd.Value, params.P))
	x.Mod(x, params.P)
	// If e2, y = y1, else y = y2
	if e2 {
		y = y1.Value
	} else {
		y = y2.Value
	}
	uBytes := u.Bytes()
	yBytes := y.Bytes()
	usign := uBytes[len(uBytes)-1] & 1
	ysign := yBytes[len(yBytes)-1] & 1
	// Fix sign of y
	if usign != ysign {
		y.Neg(y)
		y.Mod(y, params.P)
	}
	return
}
func expandMsgXmd(h hash.Hash, msg, domain []byte, outLen int) ([]byte, error) {
domainLen := uint8(len(domain))
if domainLen > 255 {
return nil, fmt.Errorf("invalid domain length")
}
// DST_prime = DST || I2OSP(len(DST), 1)
// b_0 = H(Z_pad || msg || l_i_b_str || I2OSP(0, 1) || DST_prime)
_, _ = h.Write(make([]byte, h.BlockSize()))
_, _ = h.Write(msg)
_, _ = h.Write([]byte{uint8(outLen >> 8), uint8(outLen)})
_, _ = h.Write([]byte{0})
_, _ = h.Write(domain)
_, _ = h.Write([]byte{domainLen})
b0 := h.Sum(nil)
// b_1 = H(b_0 || I2OSP(1, 1) || DST_prime)
h.Reset()
_, _ = h.Write(b0)
_, _ = h.Write([]byte{1})
_, _ = h.Write(domain)
_, _ = h.Write([]byte{domainLen})
b1 := h.Sum(nil)
// b_i = H(strxor(b_0, b_(i - 1)) || I2OSP(i, 1) || DST_prime)
ell := (outLen + h.Size() - 1) / h.Size()
bi := b1
out := make([]byte, outLen)
for i := 1; i < ell; i++ {
h.Reset()
// b_i = H(strxor(b_0, b_(i - 1)) || I2OSP(i, 1) || DST_prime)
tmp := make([]byte, h.Size())
for j := 0; j < h.Size(); j++ {
tmp[j] = b0[j] ^ bi[j]
}
_, _ = h.Write(tmp)
_, _ = h.Write([]byte{1 + uint8(i)})
_, _ = h.Write(domain)
_, _ = h.Write([]byte{domainLen})
// b_1 || ... || b_(ell - 1)
copy(out[(i-1)*h.Size():i*h.Size()], bi[:])
bi = h.Sum(nil)
}
// b_ell
copy(out[(ell-1)*h.Size():], bi[:])
return out[:outLen], nil
}
func bhex(s string) *big.Int {
r, _ := new(big.Int).SetString(s, 16)
return r
}
// sswuParams holds the precomputed constants for the simplified SWU
// map-to-curve over one short-Weierstrass curve.
type sswuParams struct {
	Params *elliptic.CurveParams
	// A, B are the curve coefficients and Z the SSWU non-square constant;
	// C1, C2 are the draft's precomputed exponent/sqrt constants — TODO
	// confirm exact definitions against the call sites that build these.
	C1, C2, A, B, Z *big.Int
}
// sumOfProductsPippenger implements a version of Pippenger's algorithm.
//
// The algorithm works as follows:
//
// Let `n` be a number of point-scalar pairs.
// Let `w` be a window of bits (6..8, chosen based on `n`, see cost factor).
//
// 1. Prepare `2^(w-1) - 1` buckets with indices `[1..2^(w-1))` initialized with identity points.
// Bucket 0 is not needed as it would contain points multiplied by 0.
// 2. Convert scalars to a radix-`2^w` representation with signed digits in `[-2^w/2, 2^w/2]`.
// Note: only the last digit may equal `2^w/2`.
// 3. Starting with the last window, for each point `i=[0..n)` add it to a a bucket indexed by
// the point's scalar's value in the window.
// 4. Once all points in a window are sorted into buckets, add buckets by multiplying each
// by their index. Efficient way of doing it is to start with the last bucket and compute two sums:
// intermediate sum from the last to the first, and the full sum made of all intermediate sums.
// 5. Shift the resulting sum of buckets by `w` bits by using `w` doublings.
// 6. Add to the return value.
// 7. Repeat the loop.
//
// Approximate cost w/o wNAF optimizations (A = addition, D = doubling):
//
// ```ascii
// cost = (n*A + 2*(2^w/2)*A + w*D + A)*256/w
//   |          |            |    |   |
//   |          |            |    |   looping over 256/w windows
//   |          |            |    adding to the result
//   sorting points          shifting the sum by w bits (to the next window, starting from last window)
//   one by one              |
//   into buckets            adding/subtracting all buckets
//                           multiplied by their indexes
//                           using a sum of intermediate sums
// ```
//
// For large `n`, dominant factor is (n*256/w) additions.
// However, if `w` is too big and `n` is not too big, then `(2^w/2)*A` could dominate.
// Therefore, the optimal choice of `w` grows slowly as `n` grows.
//
// For constant time we use a fixed window of 6
//
// This algorithm is adapted from section 4 of <https://eprint.iacr.org/2012/549.pdf>.
// and https://cacr.uwaterloo.ca/techreports/2010/cacr2010-26.pdf
func sumOfProductsPippenger(points []Point, scalars []*big.Int) Point {
	// Mismatched inputs cannot be paired; signal failure with nil.
	if len(points) != len(scalars) {
		return nil
	}
	const w = 6
	// bucketSize = 2^w - 1 doubles as the bit mask used to extract a
	// w-bit digit below.
	bucketSize := (1 << w) - 1
	// Enough windows to cover 256-bit scalars (255/6 + 1 = 43).
	windows := make([]Point, 255/w+1)
	for i := range windows {
		windows[i] = points[0].Identity()
	}
	bucket := make([]Point, bucketSize)
	for j := 0; j < len(windows); j++ {
		// Reset the buckets for this window.
		for i := 0; i < bucketSize; i++ {
			bucket[i] = points[0].Identity()
		}
		// NOTE(review): despite the signed-digit description in the doc
		// comment above, digits here are plain unsigned w-bit windows
		// (shift then mask). Also, Int64 on a value wider than 64 bits is
		// formally undefined per math/big docs, though the mask keeps only
		// the low bits — confirm acceptability.
		for i := 0; i < len(scalars); i++ {
			index := bucketSize & int(new(big.Int).Rsh(scalars[i], uint(w*j)).Int64())
			if index != 0 {
				bucket[index-1] = bucket[index-1].Add(points[i])
			}
		}
		// Weighted bucket sum via running sums: acc accumulates
		// sum(bucket[i..]) for i from high to low, which equals
		// sum(i * bucket[i-1]).
		acc, sum := windows[j].Identity(), windows[j].Identity()
		for i := bucketSize - 1; i >= 0; i-- {
			sum = sum.Add(bucket[i])
			acc = acc.Add(sum)
		}
		windows[j] = acc
	}
	// Combine windows Horner-style: shift by w doublings, then add the next
	// (lower) window.
	acc := windows[0].Identity()
	for i := len(windows) - 1; i >= 0; i-- {
		for j := 0; j < w; j++ {
			acc = acc.Double()
		}
		acc = acc.Add(windows[i])
	}
	return acc
}

View File

@ -0,0 +1,256 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/elliptic"
"encoding/json"
"fmt"
"math/big"
"github.com/btcsuite/btcd/btcec"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core"
)
// curveNameToId maps a supported curve's canonical name to the one-byte
// identifier used by the binary point encoding (see EcPoint.MarshalBinary).
var curveNameToId = map[string]byte{
	"secp256k1": 0,
	"P-224":     1,
	"P-256":     2,
	"P-384":     3,
	"P-521":     4,
}

// curveIdToName maps a binary-encoding identifier back to a curve
// constructor. (Despite the name, the values are constructors, not names.)
var curveIdToName = map[byte]func() elliptic.Curve{
	0: func() elliptic.Curve { return btcec.S256() },
	1: elliptic.P224,
	2: elliptic.P256,
	3: elliptic.P384,
	4: elliptic.P521,
}

// curveMapper resolves a curve's canonical name to its constructor; used by
// JSON deserialization.
var curveMapper = map[string]func() elliptic.Curve{
	"secp256k1": func() elliptic.Curve { return btcec.S256() },
	"P-224":     elliptic.P224,
	"P-256":     elliptic.P256,
	"P-384":     elliptic.P384,
	"P-521":     elliptic.P521,
}
// EcPoint represents an elliptic curve Point in affine coordinates.
// The point at infinity is represented as (0, 0) — see IsIdentity.
type EcPoint struct {
	Curve elliptic.Curve
	X, Y  *big.Int
}

// EcPointJson encapsulates the data that is serialized to JSON
// used internally and not for external use. Public so other pieces
// can use for serialization
type EcPointJson struct {
	CurveName string
	X, Y      *big.Int
}
// MarshalJSON serializes the point as an EcPointJson document carrying the
// curve's canonical name and the affine coordinates.
func (a EcPoint) MarshalJSON() ([]byte, error) {
	doc := EcPointJson{
		CurveName: a.Curve.Params().Name,
		X:         a.X,
		Y:         a.Y,
	}
	return json.Marshal(doc)
}
// UnmarshalJSON restores the point from an EcPointJson document, resolving
// the curve by name; unknown curve names yield an error.
func (a *EcPoint) UnmarshalJSON(bytes []byte) error {
	var doc EcPointJson
	if err := json.Unmarshal(bytes, &doc); err != nil {
		return err
	}
	mapper, ok := curveMapper[doc.CurveName]
	if !ok {
		return fmt.Errorf("unknown curve deserialized")
	}
	a.Curve = mapper()
	a.X = doc.X
	a.Y = doc.Y
	return nil
}
// MarshalBinary encodes the point as: 1 curve-id byte || 32-byte X || 32-byte Y
// (big-endian, left-padded).
//
// The fixed 32-byte fields can only hold coordinates up to 256 bits, and
// big.Int.FillBytes panics when the value does not fit. P-384 and P-521 are
// present in curveNameToId but have wider coordinates, so such points are now
// rejected with an error instead of panicking. Output for all previously
// succeeding inputs is unchanged.
func (a *EcPoint) MarshalBinary() ([]byte, error) {
	code, ok := curveNameToId[a.Curve.Params().Name]
	if !ok {
		return nil, fmt.Errorf("unknown curve serialized")
	}
	if a.X.BitLen() > 256 || a.Y.BitLen() > 256 {
		return nil, fmt.Errorf("coordinates do not fit the fixed 32-byte encoding")
	}
	result := [65]byte{}
	result[0] = code
	a.X.FillBytes(result[1:33])
	a.Y.FillBytes(result[33:65])
	return result[:], nil
}
// UnmarshalBinary decodes a point produced by MarshalBinary
// (1 curve-id byte || 32-byte X || 32-byte Y).
//
// Fixed: the input length is now validated before indexing, so malformed or
// truncated input returns an error instead of panicking on data[0] /
// data[1:33].
func (a *EcPoint) UnmarshalBinary(data []byte) error {
	if len(data) < 65 {
		return fmt.Errorf("invalid length")
	}
	if mapper, ok := curveIdToName[data[0]]; ok {
		a.Curve = mapper()
		a.X = new(big.Int).SetBytes(data[1:33])
		a.Y = new(big.Int).SetBytes(data[33:65])
		return nil
	}
	return fmt.Errorf("unknown curve deserialized")
}
// IsValid returns true when the point either satisfies the curve equation
// or is the point at infinity (0, 0).
func (a EcPoint) IsValid() bool {
	if a.IsOnCurve() {
		return true
	}
	return a.IsIdentity()
}
// IsOnCurve reports whether (X, Y) satisfies the curve equation.
func (a EcPoint) IsOnCurve() bool {
	return a.Curve.IsOnCurve(a.X, a.Y)
}
// IsIdentity returns true if this Point is the Point at infinity,
// represented here as (0, 0). The comparison is constant time.
func (a EcPoint) IsIdentity() bool {
	xIsZero := core.ConstantTimeEqByte(a.X, core.Zero)
	yIsZero := core.ConstantTimeEqByte(a.Y, core.Zero)
	return xIsZero&yIsZero == 1
}
// Equals return true if a + b have the same x,y coordinates and lie on the
// same curve. Coordinate comparison is constant time.
func (a EcPoint) Equals(b *EcPoint) bool {
	if !sameCurve(&a, b) {
		return false
	}
	sameX := core.ConstantTimeEqByte(a.X, b.X)
	sameY := core.ConstantTimeEqByte(a.Y, b.Y)
	return sameX&sameY == 1
}
// IsBasePoint returns true if this Point equals the curve's generator,
// compared in constant time.
func (a EcPoint) IsBasePoint() bool {
	params := a.Curve.Params()
	gx := core.ConstantTimeEqByte(a.X, params.Gx)
	gy := core.ConstantTimeEqByte(a.Y, params.Gy)
	return gx&gy == 1
}
// Normalizes the Scalar to a positive element smaller than the base Point order.
func reduceModN(curve elliptic.Curve, k *big.Int) *big.Int {
return new(big.Int).Mod(k, curve.Params().N)
}
// Add performs elliptic curve point addition. It errors when either argument
// is nil, the points live on different curves, or the result fails
// validation.
func (a *EcPoint) Add(b *EcPoint) (*EcPoint, error) {
	if a == nil || b == nil {
		return nil, internal.ErrNilArguments
	}
	if !sameCurve(a, b) {
		return nil, internal.ErrPointsDistinctCurves
	}
	sum := &EcPoint{Curve: a.Curve}
	sum.X, sum.Y = a.Curve.Add(a.X, a.Y, b.X, b.Y)
	if !sum.IsValid() {
		return nil, internal.ErrNotOnCurve
	}
	return sum, nil
}
// Neg returns the negation of a Weierstrass Point: (x, p - y).
func (a *EcPoint) Neg() (*EcPoint, error) {
	if a == nil {
		return nil, internal.ErrNilArguments
	}
	negY := new(big.Int).Sub(a.Curve.Params().P, a.Y)
	neg := &EcPoint{Curve: a.Curve, X: a.X, Y: negY}
	if !neg.IsValid() {
		return nil, internal.ErrNotOnCurve
	}
	return neg, nil
}
// ScalarMult multiplies this Point by a Scalar, reducing k modulo the curve
// order first.
func (a *EcPoint) ScalarMult(k *big.Int) (*EcPoint, error) {
	if a == nil || k == nil {
		return nil, fmt.Errorf("cannot multiply nil Point or element")
	}
	reduced := reduceModN(a.Curve, k)
	product := &EcPoint{Curve: a.Curve}
	product.X, product.Y = a.Curve.ScalarMult(a.X, a.Y, reduced.Bytes())
	if !product.IsValid() {
		return nil, fmt.Errorf("result not on the curve")
	}
	return product, nil
}
// NewScalarBaseMult creates a Point from the base Point multiplied by a
// field element (reduced modulo the curve order first).
func NewScalarBaseMult(curve elliptic.Curve, k *big.Int) (*EcPoint, error) {
	if curve == nil || k == nil {
		return nil, fmt.Errorf("nil parameters are not supported")
	}
	reduced := reduceModN(curve, k)
	p := &EcPoint{Curve: curve}
	p.X, p.Y = curve.ScalarBaseMult(reduced.Bytes())
	if !p.IsValid() {
		return nil, fmt.Errorf("result not on the curve")
	}
	return p, nil
}
// Bytes returns the fixed-width big-endian serialization x || y, with each
// coordinate left-padded to the curve's field size.
func (a EcPoint) Bytes() []byte {
	size := internal.CalcFieldSize(a.Curve)
	out := make([]byte, 2*size)
	a.X.FillBytes(out[:size])
	a.Y.FillBytes(out[size:])
	return out
}
// PointFromBytesUncompressed parses uncompressed X || Y similar to
// https://www.secg.org/sec1-v1.99.dif.pdf section 2.2 and 2.3
// The input must be exactly twice the curve's field size; the resulting
// point must be valid (on-curve or identity).
func PointFromBytesUncompressed(curve elliptic.Curve, b []byte) (*EcPoint, error) {
	size := internal.CalcFieldSize(curve)
	if len(b) != 2*size {
		return nil, fmt.Errorf("invalid number of bytes")
	}
	point := &EcPoint{
		Curve: curve,
		X:     new(big.Int).SetBytes(b[:size]),
		Y:     new(big.Int).SetBytes(b[size:]),
	}
	if !point.IsValid() {
		return nil, fmt.Errorf("invalid Point")
	}
	return point, nil
}
// sameCurve determines if points a, b appear to be from the same curve.
//
// Fixed: the parameter fields were compared with ==, which for *big.Int is
// POINTER equality — two structurally identical CurveParams built separately
// would compare unequal (it only worked because the stdlib and btcec return
// shared singletons). Compare the values instead, per the stated intent of
// using curve order/coefficients/name.
func sameCurve(a, b *EcPoint) bool {
	// Handle identical pointers and double-nil
	if a == b {
		return true
	}
	// Handle one nil pointer
	if a == nil || b == nil {
		return false
	}
	aParams := a.Curve.Params()
	bParams := b.Curve.Params()
	// Use curve order, coefficients, generator, and name — by value.
	return aParams.P.Cmp(bParams.P) == 0 &&
		aParams.N.Cmp(bParams.N) == 0 &&
		aParams.B.Cmp(bParams.B) == 0 &&
		aParams.BitSize == bParams.BitSize &&
		aParams.Gx.Cmp(bParams.Gx) == 0 &&
		aParams.Gy.Cmp(bParams.Gy) == 0 &&
		aParams.Name == bParams.Name
}

View File

@ -0,0 +1,370 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"bytes"
"crypto/elliptic"
"math/big"
"testing"
"github.com/btcsuite/btcd/btcec"
"github.com/stretchr/testify/require"
tt "source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core"
)
// TestIsIdentity checks that (0, 0) is treated as the point at infinity.
func TestIsIdentity(t *testing.T) {
	// Should be Point at infinity
	identity := &EcPoint{btcec.S256(), core.Zero, core.Zero}
	require.True(t, identity.IsIdentity())
}

// TestNewScalarBaseMultZero multiplies the base point by zero; the call
// should succeed (result is the identity).
func TestNewScalarBaseMultZero(t *testing.T) {
	// Should be Point at infinity
	curve := btcec.S256()
	num := big.NewInt(0)
	p, err := NewScalarBaseMult(curve, num)
	if err != nil {
		t.Errorf("NewScalarBaseMult failed: %v", err)
	}
	if p == nil {
		t.Errorf("NewScalarBaseMult failed when it should've succeeded.")
	}
}

// TestNewScalarBaseMultOne multiplies the base point by one and expects the
// generator back.
func TestNewScalarBaseMultOne(t *testing.T) {
	// Should be base Point
	curve := btcec.S256()
	num := big.NewInt(1)
	p, err := NewScalarBaseMult(curve, num)
	if err != nil {
		t.Errorf("NewScalarBaseMult failed: %v", err)
	}
	if p == nil {
		t.Errorf("NewScalarBaseMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !bytes.Equal(p.Bytes(), append(curve.Gx.Bytes(), curve.Gy.Bytes()...)) {
		t.Errorf("NewScalarBaseMult should've returned the base Point.")
	}
}

// TestNewScalarBaseMultNeg checks that a negative scalar is reduced mod N:
// (-1)G must equal (N-1)G.
func TestNewScalarBaseMultNeg(t *testing.T) {
	curve := btcec.S256()
	num := big.NewInt(-1)
	p, err := NewScalarBaseMult(curve, num)
	if err != nil {
		t.Errorf("NewScalarBaseMult failed: %v", err)
	}
	if p == nil {
		t.Errorf("NewScalarBaseMult failed when it should've succeeded.")
		t.FailNow()
	}
	num.Mod(num, curve.N)
	e, err := NewScalarBaseMult(curve, num)
	if err != nil {
		t.Errorf("NewScalarBaseMult failed: %v", err)
	}
	if e == nil {
		t.Errorf("NewScalarBaseMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !bytes.Equal(p.Bytes(), e.Bytes()) {
		t.Errorf("NewScalarBaseMult should've returned the %v, found: %v", e, p)
	}
}

// TestScalarMultZero checks that G * 0 yields the identity point.
func TestScalarMultZero(t *testing.T) {
	// Should be Point at infinity
	curve := btcec.S256()
	p := &EcPoint{
		Curve: curve,
		X:     curve.Gx,
		Y:     curve.Gy,
	}
	num := big.NewInt(0)
	q, err := p.ScalarMult(num)
	if err != nil {
		t.Errorf("ScalarMult failed: %v", err)
	}
	if q == nil {
		t.Errorf("ScalarMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !q.IsIdentity() {
		t.Errorf("ScalarMult should've returned the identity Point.")
	}
}

// TestScalarMultOne checks that G * 1 yields the generator unchanged.
func TestScalarMultOne(t *testing.T) {
	// Should be base Point
	curve := btcec.S256()
	p := &EcPoint{
		Curve: curve,
		X:     curve.Gx,
		Y:     curve.Gy,
	}
	num := big.NewInt(1)
	q, err := p.ScalarMult(num)
	if err != nil {
		t.Errorf("ScalarMult failed: %v", err)
	}
	if q == nil {
		t.Errorf("ScalarMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !bytes.Equal(q.Bytes(), append(curve.Gx.Bytes(), curve.Gy.Bytes()...)) {
		t.Errorf("ScalarMult should've returned the base Point.")
	}
}
// TestScalarMultNeg checks that G * (-1) equals G * (N-1).
//
// Fixed: the final failure message printed the input point p instead of the
// computed point q; also added t.FailNow after the q == nil check (matching
// the sibling tests) since q is dereferenced below.
func TestScalarMultNeg(t *testing.T) {
	curve := btcec.S256()
	p := &EcPoint{
		Curve: curve,
		X:     curve.Gx,
		Y:     curve.Gy,
	}
	num := big.NewInt(-1)
	q, err := p.ScalarMult(num)
	if err != nil {
		t.Errorf("ScalarMult failed: %v", err)
	}
	if q == nil {
		t.Errorf("ScalarMult failed when it should've succeeded.")
		t.FailNow()
	}
	num.Mod(num, curve.N)
	e, err := p.ScalarMult(num)
	if err != nil {
		t.Errorf("ScalarMult failed: %v", err)
	}
	if e == nil {
		t.Errorf("ScalarMult failed when it should've succeeded.")
		t.FailNow()
	}
	if !bytes.Equal(q.Bytes(), e.Bytes()) {
		t.Errorf("ScalarMult should've returned the %v, found: %v", e, q)
	}
}
// TestEcPointAddSimple checks 1G + 1G == 2G.
func TestEcPointAddSimple(t *testing.T) {
	curve := btcec.S256()
	num := big.NewInt(1)
	p1, _ := NewScalarBaseMult(curve, num)
	p2, _ := NewScalarBaseMult(curve, num)
	p3, err := p1.Add(p2)
	if err != nil {
		t.Errorf("EcPoint.Add failed: %v", err)
	}
	num = big.NewInt(2)
	ep, _ := NewScalarBaseMult(curve, num)
	if !bytes.Equal(ep.Bytes(), p3.Bytes()) {
		t.Errorf("EcPoint.Add failed: should equal %v, found: %v", ep, p3)
	}
}

// TestEcPointAddCommunicative checks aG + bG == bG + aG for random a, b.
// NOTE(review): "Communicative" is a typo for "Commutative"; the name is
// kept to avoid churning the test list.
func TestEcPointAddCommunicative(t *testing.T) {
	curve := btcec.S256()
	a, _ := core.Rand(curve.Params().N)
	b, _ := core.Rand(curve.Params().N)
	p1, _ := NewScalarBaseMult(curve, a)
	p2, _ := NewScalarBaseMult(curve, b)
	p3, err := p1.Add(p2)
	if err != nil {
		t.Errorf("EcPoint.Add failed: %v", err)
	}
	p4, err := p2.Add(p1)
	if err != nil {
		t.Errorf("EcPoint.Add failed: %v", err)
	}
	if !bytes.Equal(p3.Bytes(), p4.Bytes()) {
		t.Errorf("EcPoint.Add Communicative not valid")
	}
}

// TestEcPointAddNeg checks (-1)G + 1G == identity, which serializes to
// 64 zero bytes.
func TestEcPointAddNeg(t *testing.T) {
	curve := btcec.S256()
	num := big.NewInt(-1)
	p1, _ := NewScalarBaseMult(curve, num)
	num.Abs(num)
	p2, _ := NewScalarBaseMult(curve, num)
	p3, err := p1.Add(p2)
	if err != nil {
		t.Errorf("EcPoint.Add failed: %v", err)
	}
	zero := make([]byte, 64)
	if !bytes.Equal(zero, p3.Bytes()) {
		t.Errorf("Expected value to be zero, found: %v", p3)
	}
}
// TestEcPointBytes round-trips points through Bytes /
// PointFromBytesUncompressed on secp256k1, P-224, and P-521.
//
// Fixed: the coordinate comparisons were joined with && — both coordinates
// had to differ before the test failed. Points are unequal when EITHER
// coordinate differs, so || is used (matching TestEcPointMultRandom below).
func TestEcPointBytes(t *testing.T) {
	curve := btcec.S256()
	point, err := NewScalarBaseMult(curve, big.NewInt(2))
	require.NoError(t, err)
	data := point.Bytes()
	point2, err := PointFromBytesUncompressed(curve, data)
	require.NoError(t, err)
	if point.X.Cmp(point2.X) != 0 || point.Y.Cmp(point2.Y) != 0 {
		t.Errorf("Points are not equal. Expected %v, found %v", point, point2)
	}
	curve2 := elliptic.P224()
	p2, err := NewScalarBaseMult(curve2, big.NewInt(2))
	require.NoError(t, err)
	dta := p2.Bytes()
	point3, err := PointFromBytesUncompressed(curve2, dta)
	require.NoError(t, err)
	if p2.X.Cmp(point3.X) != 0 || p2.Y.Cmp(point3.Y) != 0 {
		t.Errorf("Points are not equal. Expected %v, found %v", p2, point3)
	}
	curve3 := elliptic.P521()
	p3, err := NewScalarBaseMult(curve3, big.NewInt(2))
	require.NoError(t, err)
	data = p3.Bytes()
	point4, err := PointFromBytesUncompressed(curve3, data)
	require.NoError(t, err)
	if p3.X.Cmp(point4.X) != 0 || p3.Y.Cmp(point4.Y) != 0 {
		t.Errorf("Points are not equal. Expected %v, found %v", p3, point4)
	}
}
// TestEcPointBytesDifferentCurves checks that a secp256k1 serialization is
// rejected when parsed against other curves: P-224 fails on length, P-256 on
// point validation.
func TestEcPointBytesDifferentCurves(t *testing.T) {
	k256 := btcec.S256()
	p224 := elliptic.P224()
	p256 := elliptic.P256()

	kp, err := NewScalarBaseMult(k256, big.NewInt(1))
	require.NoError(t, err)
	data := kp.Bytes()
	_, err = PointFromBytesUncompressed(p224, data)
	require.Error(t, err)
	_, err = PointFromBytesUncompressed(p256, data)
	require.Error(t, err)
}

// TestEcPointBytesInvalidNumberBytes checks that every length other than the
// valid 64 bytes is rejected for secp256k1.
func TestEcPointBytesInvalidNumberBytes(t *testing.T) {
	curve := btcec.S256()

	for i := 1; i < 64; i++ {
		data := make([]byte, i)
		_, err := PointFromBytesUncompressed(curve, data)
		require.Error(t, err)
	}
	for i := 65; i < 128; i++ {
		data := make([]byte, i)
		_, err := PointFromBytesUncompressed(curve, data)
		require.Error(t, err)
	}
}

// TestEcPointMultRandom round-trips a random multiple of the base point
// through Bytes / PointFromBytesUncompressed.
func TestEcPointMultRandom(t *testing.T) {
	curve := btcec.S256()
	r, err := core.Rand(curve.N)
	require.NoError(t, err)
	pt, err := NewScalarBaseMult(curve, r)
	require.NoError(t, err)
	require.NotNil(t, pt)
	data := pt.Bytes()
	pt2, err := PointFromBytesUncompressed(curve, data)
	require.NoError(t, err)
	if pt.X.Cmp(pt2.X) != 0 || pt.Y.Cmp(pt2.Y) != 0 {
		t.Errorf("Points are not equal. Expected: %v, found: %v", pt, pt2)
	}
}
// TestIsBasePoint table-tests IsBasePoint across curves, including negative
// cases with swapped/duplicated generator coordinates and the identity.
func TestIsBasePoint(t *testing.T) {
	k256 := btcec.S256()
	p224 := elliptic.P224()
	p256 := elliptic.P256()

	notG_p224, err := NewScalarBaseMult(p224, tt.B10("9876453120"))
	require.NoError(t, err)

	tests := []struct {
		name     string
		curve    elliptic.Curve
		x, y     *big.Int
		expected bool
	}{
		{"k256-positive", k256, k256.Gx, k256.Gy, true},
		{"p224-positive", p224, p224.Params().Gx, p224.Params().Gy, true},
		{"p256-positive", p256, p256.Params().Gx, p256.Params().Gy, true},

		{"p224-negative", p224, notG_p224.X, notG_p224.Y, false},
		{"p256-negative-wrong-curve", p256, notG_p224.X, notG_p224.Y, false},
		{"k256-negative-doubleGx", k256, k256.Gx, k256.Gx, false},
		{"k256-negative-doubleGy", k256, k256.Gy, k256.Gy, false},
		{"k256-negative-xy-swap", k256, k256.Gy, k256.Gx, false},
		{"k256-negative-oh-oh", k256, core.Zero, core.Zero, false},
	}
	// Run all the tests!
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			actual := EcPoint{test.curve, test.x, test.y}.IsBasePoint()
			require.Equal(t, test.expected, actual)
		})
	}
}

// TestEquals table-tests EcPoint.Equals: same pointer, same value, the
// identity on several curves, mismatched coordinates, mismatched curves,
// and nil.
func TestEquals(t *testing.T) {
	k256 := btcec.S256()
	p224 := elliptic.P224()
	p256 := elliptic.P256()

	P_p224, _ := NewScalarBaseMult(p224, tt.B10("9876453120"))
	P1_p224, _ := NewScalarBaseMult(p224, tt.B10("9876453120"))
	// Same coordinates as P_p224 but claimed to be on k256.
	P_k256 := &EcPoint{k256, P_p224.X, P_p224.Y}

	id_p224 := &EcPoint{p224, core.Zero, core.Zero}
	id_k256 := &EcPoint{k256, core.Zero, core.Zero}
	id_p256 := &EcPoint{p256, core.Zero, core.Zero}

	tests := []struct {
		name     string
		x, y     *EcPoint
		expected bool
	}{
		{"p224 same pointer", P_p224, P_p224, true},
		{"p224 same Point", P_p224, P1_p224, true},
		{"p224 identity", id_p224, id_p224, true},
		{"p256 identity", id_p256, id_p256, true},
		{"k256 identity", id_k256, id_k256, true},

		{"negative-same x different y", P_p224, &EcPoint{p224, P_p224.X, core.One}, false},
		{"negative-same y different x", P_p224, &EcPoint{p224, core.Two, P_k256.Y}, false},
		{"negative-wrong curve", P_p224, P_k256, false},
		{"negative-wrong curve reversed", P_k256, P_p224, false},
		{"Point is not the identity", P_p224, id_p224, false},
		{"negative nil", P1_p224, nil, false},
		{"identities on wrong curve", id_p256, id_k256, false},
	}
	// Run all the tests!
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			actual := test.x.Equals(test.y)
			require.Equal(t, test.expected, actual)
		})
	}
}

View File

@ -0,0 +1,351 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/elliptic"
crand "crypto/rand"
"crypto/sha512"
"fmt"
"io"
"math/big"
"filippo.io/edwards25519"
"github.com/btcsuite/btcd/btcec"
"github.com/bwesterb/go-ristretto"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves/native/bls12381"
)
// EcScalar abstracts modular arithmetic over a curve's scalar field, so the
// same protocol code can be reused across curves.
type EcScalar interface {
	Add(x, y *big.Int) *big.Int
	Sub(x, y *big.Int) *big.Int
	Neg(x *big.Int) *big.Int
	Mul(x, y *big.Int) *big.Int
	Hash(input []byte) *big.Int
	Div(x, y *big.Int) *big.Int
	Random() (*big.Int, error)
	IsValid(x *big.Int) bool
	Bytes(x *big.Int) []byte // fixed-length byte array
}

// K256Scalar implements EcScalar over the secp256k1 group order N.
type K256Scalar struct{}

// Static interface assertion
var _ EcScalar = (*K256Scalar)(nil)

// NewK256Scalar returns a new K256Scalar.
// warning: the Euclidean alg which Mod uses is not constant-time.
func NewK256Scalar() *K256Scalar {
	return &K256Scalar{}
}
// Add returns (x + y) mod N.
func (k K256Scalar) Add(x, y *big.Int) *big.Int {
	v := new(big.Int).Add(x, y)
	v.Mod(v, btcec.S256().N)
	return v
}

// Sub returns (x - y) mod N.
func (k K256Scalar) Sub(x, y *big.Int) *big.Int {
	v := new(big.Int).Sub(x, y)
	v.Mod(v, btcec.S256().N)
	return v
}

// Neg returns (-x) mod N.
func (k K256Scalar) Neg(x *big.Int) *big.Int {
	v := new(big.Int).Sub(btcec.S256().N, x)
	v.Mod(v, btcec.S256().N)
	return v
}

// Mul returns (x * y) mod N.
func (k K256Scalar) Mul(x, y *big.Int) *big.Int {
	v := new(big.Int).Mul(x, y)
	v.Mod(v, btcec.S256().N)
	return v
}

// Div returns x * y^-1 mod N.
// NOTE(review): ModInverse returns nil when y is not invertible mod N
// (e.g. y ≡ 0), which makes the subsequent Mul panic — callers must not
// pass zero.
func (k K256Scalar) Div(x, y *big.Int) *big.Int {
	t := new(big.Int).ModInverse(y, btcec.S256().N)
	return k.Mul(x, t)
}

// Hash maps arbitrary input bytes to a scalar via ScalarK256.Hash.
func (k K256Scalar) Hash(input []byte) *big.Int {
	return new(ScalarK256).Hash(input).BigInt()
}

// Random draws 48 random bytes and reduces them mod N (the extra bytes
// presumably make the reduction bias negligible — standard wide-reduction
// practice).
func (k K256Scalar) Random() (*big.Int, error) {
	b := make([]byte, 48)
	n, err := crand.Read(b)
	if err != nil {
		return nil, err
	}
	if n != 48 {
		return nil, fmt.Errorf("insufficient bytes read")
	}
	v := new(big.Int).SetBytes(b)
	v.Mod(v, btcec.S256().N)
	return v, nil
}

// IsValid reports whether x lies in the valid scalar range for N.
func (k K256Scalar) IsValid(x *big.Int) bool {
	return core.In(x, btcec.S256().N) == nil
}

// Bytes returns x as a fixed 32-byte big-endian array.
func (k K256Scalar) Bytes(x *big.Int) []byte {
	bytes := make([]byte, 32)
	x.FillBytes(bytes) // big-endian; will left-pad.
	return bytes
}
// P256Scalar implements EcScalar over the NIST P-256 group order N.
type P256Scalar struct{}

// Static interface assertion
var _ EcScalar = (*P256Scalar)(nil)

// NewP256Scalar returns a new P256Scalar.
func NewP256Scalar() *P256Scalar {
	return &P256Scalar{}
}

// Add returns (x + y) mod N.
func (k P256Scalar) Add(x, y *big.Int) *big.Int {
	v := new(big.Int).Add(x, y)
	v.Mod(v, elliptic.P256().Params().N)
	return v
}

// Sub returns (x - y) mod N.
func (k P256Scalar) Sub(x, y *big.Int) *big.Int {
	v := new(big.Int).Sub(x, y)
	v.Mod(v, elliptic.P256().Params().N)
	return v
}

// Neg returns (-x) mod N.
func (k P256Scalar) Neg(x *big.Int) *big.Int {
	v := new(big.Int).Sub(elliptic.P256().Params().N, x)
	v.Mod(v, elliptic.P256().Params().N)
	return v
}

// Mul returns (x * y) mod N.
func (k P256Scalar) Mul(x, y *big.Int) *big.Int {
	v := new(big.Int).Mul(x, y)
	v.Mod(v, elliptic.P256().Params().N)
	return v
}

// Div returns x * y^-1 mod N.
// NOTE(review): as with K256Scalar.Div, ModInverse returns nil for
// non-invertible y and the subsequent Mul would panic.
func (k P256Scalar) Div(x, y *big.Int) *big.Int {
	t := new(big.Int).ModInverse(y, elliptic.P256().Params().N)
	return k.Mul(x, t)
}

// Hash maps arbitrary input bytes to a scalar via ScalarP256.Hash.
func (k P256Scalar) Hash(input []byte) *big.Int {
	return new(ScalarP256).Hash(input).BigInt()
}

// Random draws 48 random bytes and reduces them mod N (wide reduction).
func (k P256Scalar) Random() (*big.Int, error) {
	b := make([]byte, 48)
	n, err := crand.Read(b)
	if err != nil {
		return nil, err
	}
	if n != 48 {
		return nil, fmt.Errorf("insufficient bytes read")
	}
	v := new(big.Int).SetBytes(b)
	v.Mod(v, elliptic.P256().Params().N)
	return v, nil
}

// IsValid reports whether x lies in the valid scalar range for N.
func (k P256Scalar) IsValid(x *big.Int) bool {
	return core.In(x, elliptic.P256().Params().N) == nil
}

// Bytes returns x as a fixed 32-byte big-endian array.
func (k P256Scalar) Bytes(x *big.Int) []byte {
	bytes := make([]byte, 32)
	x.FillBytes(bytes) // big-endian; will left-pad.
	return bytes
}
// Bls12381Scalar implements EcScalar over the BLS12-381 scalar field Fq.
type Bls12381Scalar struct{}

// Static interface assertion
var _ EcScalar = (*Bls12381Scalar)(nil)

// NewBls12381Scalar returns a new Bls12381Scalar.
func NewBls12381Scalar() *Bls12381Scalar {
	return &Bls12381Scalar{}
}

// Add returns (x + y) in the scalar field.
func (k Bls12381Scalar) Add(x, y *big.Int) *big.Int {
	a := bls12381.Bls12381FqNew().SetBigInt(x)
	b := bls12381.Bls12381FqNew().SetBigInt(y)
	return a.Add(a, b).BigInt()
}

// Sub returns (x - y) in the scalar field.
func (k Bls12381Scalar) Sub(x, y *big.Int) *big.Int {
	a := bls12381.Bls12381FqNew().SetBigInt(x)
	b := bls12381.Bls12381FqNew().SetBigInt(y)
	return a.Sub(a, b).BigInt()
}

// Neg returns (-x) in the scalar field.
func (k Bls12381Scalar) Neg(x *big.Int) *big.Int {
	a := bls12381.Bls12381FqNew().SetBigInt(x)
	return a.Neg(a).BigInt()
}

// Mul returns (x * y) in the scalar field.
func (k Bls12381Scalar) Mul(x, y *big.Int) *big.Int {
	a := bls12381.Bls12381FqNew().SetBigInt(x)
	b := bls12381.Bls12381FqNew().SetBigInt(y)
	return a.Mul(a, b).BigInt()
}

// Div returns x * y^-1; if y was not invertible the constant-time CMove
// discards the quotient and x is returned unchanged.
func (k Bls12381Scalar) Div(x, y *big.Int) *big.Int {
	c := bls12381.Bls12381FqNew()
	a := bls12381.Bls12381FqNew().SetBigInt(x)
	b := bls12381.Bls12381FqNew().SetBigInt(y)
	_, wasInverted := c.Invert(b)
	c.Mul(a, c)
	tt := map[bool]int{false: 0, true: 1}
	return a.CMove(a, c, tt[wasInverted]).BigInt()
}

// Hash maps arbitrary input bytes to a scalar via ScalarBls12381.Hash.
func (k Bls12381Scalar) Hash(input []byte) *big.Int {
	return new(ScalarBls12381).Hash(input).BigInt()
}

// Random returns a random scalar drawn via the curve's scalar sampler.
func (k Bls12381Scalar) Random() (*big.Int, error) {
	a := BLS12381G1().NewScalar().Random(crand.Reader)
	if a == nil {
		return nil, fmt.Errorf("invalid random value")
	}
	return a.BigInt(), nil
}

// Bytes returns x as a fixed 32-byte big-endian array.
func (k Bls12381Scalar) Bytes(x *big.Int) []byte {
	bytes := make([]byte, 32)
	x.FillBytes(bytes) // big-endian; will left-pad.
	return bytes
}

// IsValid reports whether x survives a round-trip into the field unchanged,
// i.e. x is already reduced.
func (k Bls12381Scalar) IsValid(x *big.Int) bool {
	a := bls12381.Bls12381FqNew().SetBigInt(x)
	return a.BigInt().Cmp(x) == 0
}
// ed25519N is the order l of the ed25519 prime-order subgroup,
// taken from https://datatracker.ietf.org/doc/html/rfc8032
var ed25519N, _ = new(big.Int).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16)

// Ed25519Scalar implements EcScalar over the ed25519 scalar field (mod l).
type Ed25519Scalar struct{}

// Static interface assertion
var _ EcScalar = (*Ed25519Scalar)(nil)

// NewEd25519Scalar returns a new Ed25519Scalar.
func NewEd25519Scalar() *Ed25519Scalar {
	return &Ed25519Scalar{}
}
// Add returns (x + y) mod l, converting between big-endian big.Int and the
// library's little-endian scalars. Panics when an input is out of range.
func (k Ed25519Scalar) Add(x, y *big.Int) *big.Int {
	a, err := internal.BigInt2Ed25519Scalar(x)
	if err != nil {
		panic(err)
	}
	b, err := internal.BigInt2Ed25519Scalar(y)
	if err != nil {
		panic(err)
	}
	a.Add(a, b)
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}

// Sub returns (x - y) mod l. Panics when an input is out of range.
func (k Ed25519Scalar) Sub(x, y *big.Int) *big.Int {
	a, err := internal.BigInt2Ed25519Scalar(x)
	if err != nil {
		panic(err)
	}
	b, err := internal.BigInt2Ed25519Scalar(y)
	if err != nil {
		panic(err)
	}
	a.Subtract(a, b)
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}

// Neg returns (-x) mod l. Panics when x is out of range.
func (k Ed25519Scalar) Neg(x *big.Int) *big.Int {
	a, err := internal.BigInt2Ed25519Scalar(x)
	if err != nil {
		panic(err)
	}
	a.Negate(a)
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}

// Mul returns (x * y) mod l. Panics when an input is out of range.
func (k Ed25519Scalar) Mul(x, y *big.Int) *big.Int {
	a, err := internal.BigInt2Ed25519Scalar(x)
	if err != nil {
		panic(err)
	}
	b, err := internal.BigInt2Ed25519Scalar(y)
	if err != nil {
		panic(err)
	}
	a.Multiply(a, b)
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}

// Div returns x * y^-1 mod l. Panics when an input is out of range.
func (k Ed25519Scalar) Div(x, y *big.Int) *big.Int {
	b, err := internal.BigInt2Ed25519Scalar(y)
	if err != nil {
		panic(err)
	}
	b.Invert(b)
	a, err := internal.BigInt2Ed25519Scalar(x)
	if err != nil {
		panic(err)
	}
	a.Multiply(a, b)
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(a.Bytes()))
}

// Hash derives a scalar from input via ristretto's Derive and returns it as
// a big-endian big.Int.
func (k Ed25519Scalar) Hash(input []byte) *big.Int {
	v := new(ristretto.Scalar).Derive(input)
	var data [32]byte
	v.BytesInto(&data)
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(data[:]))
}

// Bytes returns the 32-byte big-endian encoding of x. Panics when x is out
// of range.
func (k Ed25519Scalar) Bytes(x *big.Int) []byte {
	a, err := internal.BigInt2Ed25519Scalar(x)
	if err != nil {
		panic(err)
	}
	return internal.ReverseScalarBytes(a.Bytes())
}

// Random draws a random scalar from crypto/rand.
func (k Ed25519Scalar) Random() (*big.Int, error) {
	return k.RandomWithReader(crand.Reader)
}

// RandomWithReader reads 64 bytes from r, hashes them with SHA-512, and
// clamps the first 32 digest bytes (RFC 8032-style key derivation) to form
// the scalar.
func (k Ed25519Scalar) RandomWithReader(r io.Reader) (*big.Int, error) {
	b := make([]byte, 64)
	n, err := r.Read(b)
	if err != nil {
		return nil, err
	}
	if n != 64 {
		return nil, fmt.Errorf("insufficient bytes read")
	}
	digest := sha512.Sum512(b)
	var hBytes [32]byte
	copy(hBytes[:], digest[:])
	s, err := edwards25519.NewScalar().SetBytesWithClamping(hBytes[:])
	if err != nil {
		return nil, err
	}
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(s.Bytes())), nil
}

// IsValid reports whether x < l.
func (k Ed25519Scalar) IsValid(x *big.Int) bool {
	return x.Cmp(ed25519N) == -1
}

View File

@ -0,0 +1,38 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/ecdsa"
"math/big"
)
// EcdsaVerify runs a curve- or algorithm-specific ECDSA verification function on input
// an ECDSA public (verification) key, a message digest, and an ECDSA signature.
// It must return true if all the parameters are sane and the ECDSA signature is valid,
// and false otherwise
type EcdsaVerify func(pubKey *EcPoint, hash []byte, signature *EcdsaSignature) bool

// EcdsaSignature represents a (composite) digital signature
type EcdsaSignature struct {
	// V is an extra signature component (presumably a recovery identifier —
	// confirm with callers); it is ignored by VerifyEcdsa below.
	V    int
	R, S *big.Int
}

// Static type assertion
var _ EcdsaVerify = VerifyEcdsa

// VerifyEcdsa verifies an ECDSA signature (R, S) over the given digest using
// the standard library's crypto/ecdsa on the point's curve.
func VerifyEcdsa(pk *EcPoint, hash []byte, sig *EcdsaSignature) bool {
	return ecdsa.Verify(
		&ecdsa.PublicKey{
			Curve: pk.Curve,
			X:     pk.X,
			Y:     pk.Y,
		},
		hash, sig.R, sig.S)
}

View File

@ -0,0 +1,786 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"bytes"
"crypto/sha512"
"crypto/subtle"
"fmt"
"io"
"math/big"
"filippo.io/edwards25519"
"filippo.io/edwards25519/field"
"github.com/bwesterb/go-ristretto"
ed "github.com/bwesterb/go-ristretto/edwards25519"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
)
// ScalarEd25519 wraps a filippo.io/edwards25519 scalar (arithmetic mod the
// subgroup order l).
type ScalarEd25519 struct {
	value *edwards25519.Scalar
}

// PointEd25519 wraps a filippo.io/edwards25519 point.
type PointEd25519 struct {
	value *edwards25519.Point
}

// scOne is the scalar constant 1 in canonical little-endian form.
var scOne, _ = edwards25519.NewScalar().SetCanonicalBytes([]byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
// Random returns a scalar derived by hashing 64 bytes drawn from reader.
// Returns nil when the reader is nil or fails to supply all 64 bytes.
//
// Fixed: the read result was previously discarded (`_, _ = reader.Read`), so
// a short read or error silently produced a scalar from a partially-zero
// seed. io.ReadFull guarantees a full seed or an explicit failure.
func (s *ScalarEd25519) Random(reader io.Reader) Scalar {
	if reader == nil {
		return nil
	}
	var seed [64]byte
	if _, err := io.ReadFull(reader, seed[:]); err != nil {
		return nil
	}
	return s.Hash(seed[:])
}
// Hash derives a scalar from arbitrary bytes using ristretto's Derive, then
// re-parses the little-endian bytes as an edwards25519 scalar. Returns nil
// if the bytes are not canonical (not expected in practice, since Derive
// presumably reduces mod l — TODO confirm).
func (s *ScalarEd25519) Hash(bytes []byte) Scalar {
	v := new(ristretto.Scalar).Derive(bytes)
	var data [32]byte
	v.BytesInto(&data)
	value, err := edwards25519.NewScalar().SetCanonicalBytes(data[:])
	if err != nil {
		return nil
	}
	return &ScalarEd25519{value}
}
// Zero returns the scalar 0.
func (s *ScalarEd25519) Zero() Scalar {
	return &ScalarEd25519{
		value: edwards25519.NewScalar(),
	}
}

// One returns the scalar 1.
func (s *ScalarEd25519) One() Scalar {
	return &ScalarEd25519{
		value: edwards25519.NewScalar().Set(scOne),
	}
}

// IsZero reports whether every byte of the canonical encoding is zero.
func (s *ScalarEd25519) IsZero() bool {
	i := byte(0)
	for _, b := range s.value.Bytes() {
		i |= b
	}
	return i == 0
}

// IsOne reports whether the canonical (little-endian) encoding is 1
// followed by zeros.
func (s *ScalarEd25519) IsOne() bool {
	data := s.value.Bytes()
	i := byte(0)
	for j := 1; j < len(data); j++ {
		i |= data[j]
	}
	return i == 0 && data[0] == 1
}

// IsOdd reports whether the low bit of the canonical encoding is set.
func (s *ScalarEd25519) IsOdd() bool {
	return s.value.Bytes()[0]&1 == 1
}

// IsEven reports whether the low bit of the canonical encoding is clear.
func (s *ScalarEd25519) IsEven() bool {
	return s.value.Bytes()[0]&1 == 0
}
// New converts a machine integer to a scalar. Only the low 32 bits of the
// magnitude are encoded (wider inputs are silently truncated); negative
// inputs are negated mod l. Returns nil if scalar construction fails.
func (s *ScalarEd25519) New(input int) Scalar {
	var data [64]byte
	i := input
	if input < 0 {
		i = -input
	}
	data[0] = byte(i)
	data[1] = byte(i >> 8)
	data[2] = byte(i >> 16)
	data[3] = byte(i >> 24)
	value, err := edwards25519.NewScalar().SetUniformBytes(data[:])
	if err != nil {
		return nil
	}
	if input < 0 {
		value.Negate(value)
	}
	return &ScalarEd25519{
		value,
	}
}
// Cmp returns 0 when s equals rhs and -2 otherwise (including when rhs is
// not a ScalarEd25519); it does not define an ordering.
func (s *ScalarEd25519) Cmp(rhs Scalar) int {
	r := s.Sub(rhs)
	if r != nil && r.IsZero() {
		return 0
	} else {
		return -2
	}
}
// Square returns s^2 mod l.
func (s *ScalarEd25519) Square() Scalar {
	value := edwards25519.NewScalar().Multiply(s.value, s.value)
	return &ScalarEd25519{value}
}

// Double returns 2s mod l.
func (s *ScalarEd25519) Double() Scalar {
	return &ScalarEd25519{
		value: edwards25519.NewScalar().Add(s.value, s.value),
	}
}

// Invert returns s^-1 mod l; the returned error is always nil.
func (s *ScalarEd25519) Invert() (Scalar, error) {
	return &ScalarEd25519{
		value: edwards25519.NewScalar().Invert(s.value),
	}, nil
}

// Sqrt computes a square root of s modulo the group order l using
// math/big's ModSqrt on the big-endian representation. (The hex constant is
// l, matching ed25519N.)
func (s *ScalarEd25519) Sqrt() (Scalar, error) {
	bi25519, _ := new(big.Int).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16)
	x := s.BigInt()
	x.ModSqrt(x, bi25519)
	return s.SetBigInt(x)
}

// Cube returns s^3 mod l.
func (s *ScalarEd25519) Cube() Scalar {
	value := edwards25519.NewScalar().Multiply(s.value, s.value)
	value.Multiply(value, s.value)
	return &ScalarEd25519{value}
}
// Add returns s+rhs mod l, or nil when rhs is not a *ScalarEd25519.
func (s *ScalarEd25519) Add(rhs Scalar) Scalar {
	r, ok := rhs.(*ScalarEd25519)
	if !ok {
		return nil
	}
	return &ScalarEd25519{value: edwards25519.NewScalar().Add(s.value, r.value)}
}

// Sub returns s-rhs mod l, or nil when rhs is not a *ScalarEd25519.
func (s *ScalarEd25519) Sub(rhs Scalar) Scalar {
	r, ok := rhs.(*ScalarEd25519)
	if !ok {
		return nil
	}
	return &ScalarEd25519{value: edwards25519.NewScalar().Subtract(s.value, r.value)}
}

// Mul returns s*rhs mod l, or nil when rhs is not a *ScalarEd25519.
func (s *ScalarEd25519) Mul(rhs Scalar) Scalar {
	r, ok := rhs.(*ScalarEd25519)
	if !ok {
		return nil
	}
	return &ScalarEd25519{value: edwards25519.NewScalar().Multiply(s.value, r.value)}
}

// MulAdd returns s*y + z mod l, or nil when either operand has the
// wrong concrete type.
func (s *ScalarEd25519) MulAdd(y, z Scalar) Scalar {
	yy, ok := y.(*ScalarEd25519)
	if !ok {
		return nil
	}
	zz, ok := z.(*ScalarEd25519)
	if !ok {
		return nil
	}
	sum := edwards25519.NewScalar().MultiplyAdd(s.value, yy.value, zz.value)
	return &ScalarEd25519{value: sum}
}

// Div returns s * rhs⁻¹ mod l, or nil when rhs is not a *ScalarEd25519.
func (s *ScalarEd25519) Div(rhs Scalar) Scalar {
	r, ok := rhs.(*ScalarEd25519)
	if !ok {
		return nil
	}
	quot := edwards25519.NewScalar().Invert(r.value)
	quot.Multiply(quot, s.value)
	return &ScalarEd25519{value: quot}
}
// Neg returns -s mod l.
func (s *ScalarEd25519) Neg() Scalar {
	neg := edwards25519.NewScalar().Negate(s.value)
	return &ScalarEd25519{value: neg}
}
// SetBigInt reduces x mod the ed25519 group order l and returns the
// corresponding scalar. It returns an error when x is nil.
func (s *ScalarEd25519) SetBigInt(x *big.Int) (Scalar, error) {
	if x == nil {
		return nil, fmt.Errorf("invalid value")
	}
	// l = 2^252 + 27742317777372353535851937790883648493 (group order).
	bi25519, _ := new(big.Int).SetString("1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", 16)
	var v big.Int
	buf := v.Mod(x, bi25519).Bytes()
	// big.Int.Bytes is big-endian; reverse into the 32-byte little-endian
	// form SetCanonicalBytes expects. Short residues are zero-padded.
	var rBuf [32]byte
	for i := 0; i < len(buf) && i < 32; i++ {
		rBuf[i] = buf[len(buf)-i-1]
	}
	value, err := edwards25519.NewScalar().SetCanonicalBytes(rBuf[:])
	if err != nil {
		return nil, err
	}
	return &ScalarEd25519{value}, nil
}
// BigInt returns the scalar as a big-endian *big.Int.
func (s *ScalarEd25519) BigInt() *big.Int {
	le := s.value.Bytes() // canonical little-endian encoding
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(le))
}

// Bytes returns the canonical 32-byte little-endian encoding of s.
func (s *ScalarEd25519) Bytes() []byte {
	return s.value.Bytes()
}
// SetBytes interprets input as a canonical 32-byte little-endian scalar
// encoding. It returns an error when input is not exactly 32 bytes long
// or is not a reduced (canonical) encoding.
func (s *ScalarEd25519) SetBytes(input []byte) (Scalar, error) {
	if len(input) != 32 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	sc, err := edwards25519.NewScalar().SetCanonicalBytes(input)
	if err != nil {
		return nil, err
	}
	return &ScalarEd25519{value: sc}, nil
}

// SetBytesWide reduces a 64-byte little-endian value into an ed25519
// scalar using SetUniformBytes of filippo.io/edwards25519 -
// https://github.com/FiloSottile/edwards25519/blob/v1.0.0-rc.1/scalar.go#L85
// If bytes is not of the right length, it returns nil and an error.
func (s *ScalarEd25519) SetBytesWide(bytes []byte) (Scalar, error) {
	sc, err := edwards25519.NewScalar().SetUniformBytes(bytes)
	if err != nil {
		return nil, err
	}
	return &ScalarEd25519{value: sc}, nil
}

// SetBytesClamping uses SetBytesWithClamping of filippo.io/edwards25519 -
// https://github.com/FiloSottile/edwards25519/blob/v1.0.0-rc.1/scalar.go#L135
// which applies the buffer pruning described in RFC 8032, Section 5.1.5
// (also known as clamping) and sets bytes to the result. The input must
// be 32 bytes long and is not modified. If bytes has the wrong length,
// nil and an error are returned and the receiver is unchanged.
func (s *ScalarEd25519) SetBytesClamping(bytes []byte) (Scalar, error) {
	sc, err := edwards25519.NewScalar().SetBytesWithClamping(bytes)
	if err != nil {
		return nil, err
	}
	return &ScalarEd25519{value: sc}, nil
}

// SetBytesCanonical uses SetCanonicalBytes of filippo.io/edwards25519.
// https://github.com/FiloSottile/edwards25519/blob/v1.0.0-rc.1/scalar.go#L98
// It accepts a 32-byte little-endian encoding of a scalar and returns
// the corresponding ed25519 scalar, or nil and an error when the input
// is not a canonical encoding. It delegates to SetBytes.
func (s *ScalarEd25519) SetBytesCanonical(bytes []byte) (Scalar, error) {
	return s.SetBytes(bytes)
}
// Point returns the identity point of the associated curve.
func (s *ScalarEd25519) Point() Point {
	return new(PointEd25519).Identity()
}

// Clone returns an independent copy of s.
func (s *ScalarEd25519) Clone() Scalar {
	cp := edwards25519.NewScalar().Set(s.value)
	return &ScalarEd25519{value: cp}
}
// MarshalBinary encodes s using the shared curve-scalar binary format.
func (s *ScalarEd25519) MarshalBinary() ([]byte, error) {
	return scalarMarshalBinary(s)
}

// UnmarshalBinary decodes a scalar produced by MarshalBinary.
func (s *ScalarEd25519) UnmarshalBinary(input []byte) error {
	sc, err := scalarUnmarshalBinary(input)
	if err != nil {
		return err
	}
	other, ok := sc.(*ScalarEd25519)
	if !ok {
		return fmt.Errorf("invalid scalar")
	}
	s.value = other.value
	return nil
}

// MarshalText encodes s using the shared curve-scalar text format.
func (s *ScalarEd25519) MarshalText() ([]byte, error) {
	return scalarMarshalText(s)
}

// UnmarshalText decodes a scalar produced by MarshalText.
func (s *ScalarEd25519) UnmarshalText(input []byte) error {
	sc, err := scalarUnmarshalText(input)
	if err != nil {
		return err
	}
	other, ok := sc.(*ScalarEd25519)
	if !ok {
		return fmt.Errorf("invalid scalar")
	}
	s.value = other.value
	return nil
}

// GetEdwardsScalar returns a copy of the underlying
// filippo.io/edwards25519 scalar.
func (s *ScalarEd25519) GetEdwardsScalar() *edwards25519.Scalar {
	return edwards25519.NewScalar().Set(s.value)
}

// SetEdwardsScalar wraps a copy of sc in a new ScalarEd25519.
func (s *ScalarEd25519) SetEdwardsScalar(sc *edwards25519.Scalar) *ScalarEd25519 {
	return &ScalarEd25519{value: edwards25519.NewScalar().Set(sc)}
}

// MarshalJSON encodes s using the shared curve-scalar JSON format.
func (s *ScalarEd25519) MarshalJSON() ([]byte, error) {
	return scalarMarshalJson(s)
}

// UnmarshalJSON decodes a scalar produced by MarshalJSON.
func (s *ScalarEd25519) UnmarshalJSON(input []byte) error {
	sc, err := scalarUnmarshalJson(input)
	if err != nil {
		return err
	}
	other, ok := sc.(*ScalarEd25519)
	if !ok {
		return fmt.Errorf("invalid type")
	}
	s.value = other.value
	return nil
}
// Random returns a pseudo-random point derived by hashing 64 bytes drawn
// from reader (Elligator2 via Hash). It returns nil when reader is nil,
// matching the nil-reader handling of ScalarEd448.Random, instead of
// panicking on the nil interface.
func (p *PointEd25519) Random(reader io.Reader) Point {
	if reader == nil {
		return nil
	}
	var seed [64]byte
	// Best-effort read: a short read only reduces seed entropy, matching
	// the original behavior of ignoring the Read result.
	_, _ = reader.Read(seed[:])
	return p.Hash(seed[:])
}
// Hash performs hashing to the group using the Elligator2 map.
//
// See https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-11#section-6.7.1
func (p *PointEd25519) Hash(bytes []byte) Point {
	// SHA-512 the input; the first 32 bytes become the field-element
	// candidate, and its top bit is peeled off as the Edwards sign bit.
	h := sha512.Sum512(bytes)
	var res [32]byte
	copy(res[:], h[:32])
	signBit := (res[31] & 0x80) >> 7
	// Round-trip through SetBytes/BytesInto to reduce the candidate into
	// canonical field form before mapping.
	fe := new(ed.FieldElement).SetBytes(&res).BytesInto(&res)
	// Elligator2 gives a Montgomery u-coordinate, which toEdwards maps
	// onto the Edwards curve using the extracted sign.
	m1 := elligatorEncode(fe)
	return toEdwards(m1, signBit)
}
// Identity returns the neutral element of the group.
func (p *PointEd25519) Identity() Point {
	return &PointEd25519{value: edwards25519.NewIdentityPoint()}
}

// Generator returns the standard ed25519 base point.
func (p *PointEd25519) Generator() Point {
	return &PointEd25519{value: edwards25519.NewGeneratorPoint()}
}

// IsIdentity reports whether p is the neutral element.
func (p *PointEd25519) IsIdentity() bool {
	return p.Equal(p.Identity())
}

// IsNegative always reports false.
// Negative points don't really exist in ed25519.
func (p *PointEd25519) IsNegative() bool {
	return false
}

// IsOnCurve reports whether the compressed encoding of p decodes to a
// valid curve point.
func (p *PointEd25519) IsOnCurve() bool {
	_, err := edwards25519.NewIdentityPoint().SetBytes(p.ToAffineCompressed())
	return err == nil
}

// Double returns 2*p.
func (p *PointEd25519) Double() Point {
	sum := edwards25519.NewIdentityPoint().Add(p.value, p.value)
	return &PointEd25519{value: sum}
}

// Scalar returns the zero scalar of the associated scalar field.
func (p *PointEd25519) Scalar() Scalar {
	return new(ScalarEd25519).Zero()
}

// Neg returns -p.
func (p *PointEd25519) Neg() Point {
	neg := edwards25519.NewIdentityPoint().Negate(p.value)
	return &PointEd25519{value: neg}
}
// Add returns p+rhs, or nil when rhs is nil or not a *PointEd25519.
func (p *PointEd25519) Add(rhs Point) Point {
	if rhs == nil {
		return nil
	}
	r, ok := rhs.(*PointEd25519)
	if !ok {
		return nil
	}
	return &PointEd25519{value: edwards25519.NewIdentityPoint().Add(p.value, r.value)}
}

// Sub returns p-rhs, or nil when rhs is nil or not a *PointEd25519.
func (p *PointEd25519) Sub(rhs Point) Point {
	if rhs == nil {
		return nil
	}
	r, ok := rhs.(*PointEd25519)
	if !ok {
		return nil
	}
	// p - r == p + (-r)
	negated := edwards25519.NewIdentityPoint().Negate(r.value)
	return &PointEd25519{value: edwards25519.NewIdentityPoint().Add(p.value, negated)}
}

// Mul returns rhs*p, or nil when rhs is nil or not a *ScalarEd25519.
func (p *PointEd25519) Mul(rhs Scalar) Point {
	if rhs == nil {
		return nil
	}
	r, ok := rhs.(*ScalarEd25519)
	if !ok {
		return nil
	}
	prod := edwards25519.NewIdentityPoint().ScalarMult(r.value, p.value)
	return &PointEd25519{value: prod}
}
// MangleScalarBitsAndMulByBasepointToProducePublicKey
// is a function for mangling the bits of a (formerly
// mathematically well-defined) "scalar" and multiplying it to produce a
// public key. It applies the RFC 8032 clamping to the scalar's byte
// encoding before the base-point multiplication.
func (p *PointEd25519) MangleScalarBitsAndMulByBasepointToProducePublicKey(rhs *ScalarEd25519) *PointEd25519 {
	data := rhs.value.Bytes()
	// SetBytesWithClamping prunes the bits per RFC 8032 Section 5.1.5.
	s, err := edwards25519.NewScalar().SetBytesWithClamping(data[:])
	if err != nil {
		return nil
	}
	value := edwards25519.NewIdentityPoint().ScalarBaseMult(s)
	return &PointEd25519{value}
}
// Equal reports whether p and rhs represent the same group element.
// A rhs of a different concrete type (including nil) compares unequal.
func (p *PointEd25519) Equal(rhs Point) bool {
	r, ok := rhs.(*PointEd25519)
	if !ok {
		return false
	}
	// Delegate projective comparison to edwards25519, which checks
	// (X/Z, Y/Z) equality via cross-multiplication, i.e. (XZ')Z == (X'Z)Z'
	// and likewise for Y, avoiding field inversions.
	return p.value.Equal(r.value) == 1
}
// Set builds a point from affine coordinates given as big-endian
// big.Ints. If either coordinate is zero (empty byte form), the
// identity point is returned.
func (p *PointEd25519) Set(x, y *big.Int) (Point, error) {
	// check is identity
	xx := subtle.ConstantTimeCompare(x.Bytes(), []byte{})
	yy := subtle.ConstantTimeCompare(y.Bytes(), []byte{})
	if (xx | yy) == 1 {
		return p.Identity(), nil
	}
	xElem := new(ed.FieldElement).SetBigInt(x)
	yElem := new(ed.FieldElement).SetBigInt(y)
	// Serialize x||y into the 64-byte little-endian uncompressed layout
	// and reuse the uncompressed decoder for validation.
	var data [32]byte
	var affine [64]byte
	xElem.BytesInto(&data)
	copy(affine[:32], data[:])
	yElem.BytesInto(&data)
	copy(affine[32:], data[:])
	return p.FromAffineUncompressed(affine[:])
}
// sqrtRatio sets r to the non-negative square root of the ratio of u and v.
//
// If u/v is square, sqrtRatio returns r and 1. If u/v is not square, SqrtRatio
// sets r according to Section 4.3 of draft-irtf-cfrg-ristretto255-decaf448-00,
// and returns r and 0.
func sqrtRatio(u, v *ed.FieldElement) (r *ed.FieldElement, wasSquare bool) {
	// sqrtM1 is the field constant SQRT_M1 (a square root of -1) in the
	// limb representation used by ed.FieldElement.
	var sqrtM1 = ed.FieldElement{
		533094393274173, 2016890930128738, 18285341111199,
		134597186663265, 1486323764102114,
	}
	// a and b are scratch elements reused across the computation.
	a := new(ed.FieldElement)
	b := new(ed.FieldElement)
	r = new(ed.FieldElement)
	// r = (u * v3) * (u * v7)^((p-5)/8)
	v2 := a.Square(v)
	uv3 := b.Mul(u, b.Mul(v2, v))
	uv7 := a.Mul(uv3, a.Square(v2))
	r.Mul(uv3, r.Exp22523(uv7))
	check := a.Mul(v, a.Square(r)) // check = v * r^2
	uNeg := b.Neg(u)
	// Classify the candidate: exact root, root of -u, or root of -u*i.
	correctSignSqrt := check.Equals(u)
	flippedSignSqrt := check.Equals(uNeg)
	flippedSignSqrtI := check.Equals(uNeg.Mul(uNeg, &sqrtM1))
	rPrime := b.Mul(r, &sqrtM1) // r_prime = SQRT_M1 * r
	// r = CT_SELECT(r_prime IF flipped_sign_sqrt | flipped_sign_sqrt_i ELSE r)
	cselect(r, rPrime, r, flippedSignSqrt || flippedSignSqrtI)
	r.Abs(r) // Choose the nonnegative square root.
	return r, correctSignSqrt || flippedSignSqrt
}
// cselect sets v to a if cond is true, and to b if cond is false,
// returning v. The selection itself is done limb-wise with a mask so no
// limb copy depends on the data values.
func cselect(v, a, b *ed.FieldElement, cond bool) *ed.FieldElement {
	const mask64Bits uint64 = (1 << 64) - 1
	// m is all-ones when cond is true, all-zeros otherwise.
	m := uint64(0)
	if cond {
		m = mask64Bits
	}
	v[0] = (m & a[0]) | (^m & b[0])
	v[1] = (m & a[1]) | (^m & b[1])
	v[2] = (m & a[2]) | (^m & b[2])
	v[3] = (m & a[3]) | (^m & b[3])
	v[4] = (m & a[4]) | (^m & b[4])
	return v
}
// ToAffineCompressed returns the canonical 32-byte compressed encoding.
func (p *PointEd25519) ToAffineCompressed() []byte {
	return p.value.Bytes()
}

// ToAffineUncompressed returns x||y as 64 little-endian bytes, computed
// by normalizing the extended coordinates with a single inversion of Z.
func (p *PointEd25519) ToAffineUncompressed() []byte {
	x, y, z, _ := p.value.ExtendedCoordinates()
	zInv := new(field.Element).Invert(z)
	affX := x.Multiply(x, zInv)
	affY := y.Multiply(y, zInv)
	var out [64]byte
	copy(out[:32], affX.Bytes())
	copy(out[32:], affY.Bytes())
	return out[:]
}
// FromAffineCompressed decodes a 32-byte compressed Edwards encoding.
func (p *PointEd25519) FromAffineCompressed(inBytes []byte) (Point, error) {
	pt, err := edwards25519.NewIdentityPoint().SetBytes(inBytes)
	if err != nil {
		return nil, err
	}
	return &PointEd25519{value: pt}, nil
}

// FromAffineUncompressed decodes a 64-byte little-endian x||y affine
// encoding. The all-zero encoding denotes the identity point.
func (p *PointEd25519) FromAffineUncompressed(inBytes []byte) (Point, error) {
	if len(inBytes) != 64 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	var zeros [64]byte
	if bytes.Equal(inBytes, zeros[:]) {
		return &PointEd25519{value: edwards25519.NewIdentityPoint()}, nil
	}
	x, err := new(field.Element).SetBytes(inBytes[:32])
	if err != nil {
		return nil, err
	}
	y, err := new(field.Element).SetBytes(inBytes[32:])
	if err != nil {
		return nil, err
	}
	// Lift (x, y) into extended coordinates (X:Y:Z:T) with Z=1, T=x*y.
	one := new(field.Element).One()
	t := new(field.Element).Multiply(x, y)
	value, err := edwards25519.NewIdentityPoint().SetExtendedCoordinates(x, y, one, t)
	if err != nil {
		return nil, err
	}
	return &PointEd25519{value}, nil
}
// CurveName returns the canonical name of this curve.
func (p *PointEd25519) CurveName() string {
	return ED25519Name
}
// SumOfProducts computes the multi-scalar multiplication
// Σ scalars[i]*points[i]. It returns nil when the slice lengths differ,
// a scalar is not a canonical encoding, or a point has the wrong
// concrete type.
func (p *PointEd25519) SumOfProducts(points []Point, scalars []Scalar) Point {
	// Guard explicitly: edwards25519's MultiScalarMult panics when
	// len(scalars) != len(points); return nil like other invalid inputs.
	if len(points) != len(scalars) {
		return nil
	}
	nScalars := make([]*edwards25519.Scalar, len(scalars))
	nPoints := make([]*edwards25519.Point, len(points))
	for i, sc := range scalars {
		s, err := edwards25519.NewScalar().SetCanonicalBytes(sc.Bytes())
		if err != nil {
			return nil
		}
		nScalars[i] = s
	}
	for i, pt := range points {
		pp, ok := pt.(*PointEd25519)
		if !ok {
			return nil
		}
		nPoints[i] = pp.value
	}
	value := edwards25519.NewIdentityPoint().MultiScalarMult(nScalars, nPoints)
	return &PointEd25519{value: value}
}
// VarTimeDoubleScalarBaseMult computes a*A + b*G in variable time,
// where G is the ed25519 base point. It returns nil on any concrete
// type mismatch.
func (p *PointEd25519) VarTimeDoubleScalarBaseMult(a Scalar, A Point, b Scalar) Point {
	AA, okPoint := A.(*PointEd25519)
	aa, okA := a.(*ScalarEd25519)
	bb, okB := b.(*ScalarEd25519)
	if !okPoint || !okA || !okB {
		return nil
	}
	value := edwards25519.NewIdentityPoint().VarTimeDoubleScalarBaseMult(aa.value, AA.value, bb.value)
	return &PointEd25519{value: value}
}
// MarshalBinary encodes p using the shared curve-point binary format.
func (p *PointEd25519) MarshalBinary() ([]byte, error) {
	return pointMarshalBinary(p)
}

// UnmarshalBinary decodes a point produced by MarshalBinary.
func (p *PointEd25519) UnmarshalBinary(input []byte) error {
	pt, err := pointUnmarshalBinary(input)
	if err != nil {
		return err
	}
	other, ok := pt.(*PointEd25519)
	if !ok {
		return fmt.Errorf("invalid point")
	}
	p.value = other.value
	return nil
}

// MarshalText encodes p using the shared curve-point text format.
func (p *PointEd25519) MarshalText() ([]byte, error) {
	return pointMarshalText(p)
}

// UnmarshalText decodes a point produced by MarshalText.
func (p *PointEd25519) UnmarshalText(input []byte) error {
	pt, err := pointUnmarshalText(input)
	if err != nil {
		return err
	}
	other, ok := pt.(*PointEd25519)
	if !ok {
		return fmt.Errorf("invalid point")
	}
	p.value = other.value
	return nil
}

// MarshalJSON encodes p using the shared curve-point JSON format.
func (p *PointEd25519) MarshalJSON() ([]byte, error) {
	return pointMarshalJson(p)
}

// UnmarshalJSON decodes a point produced by MarshalJSON.
func (p *PointEd25519) UnmarshalJSON(input []byte) error {
	pt, err := pointUnmarshalJson(input)
	if err != nil {
		return err
	}
	other, ok := pt.(*PointEd25519)
	if !ok {
		return fmt.Errorf("invalid type")
	}
	p.value = other.value
	return nil
}
// GetEdwardsPoint returns a copy of the underlying
// filippo.io/edwards25519 point.
func (p *PointEd25519) GetEdwardsPoint() *edwards25519.Point {
	return edwards25519.NewIdentityPoint().Set(p.value)
}

// SetEdwardsPoint wraps a copy of pt in a new PointEd25519.
func (p *PointEd25519) SetEdwardsPoint(pt *edwards25519.Point) *PointEd25519 {
	return &PointEd25519{value: edwards25519.NewIdentityPoint().Set(pt)}
}
// Attempt to convert to an `EdwardsPoint`, using the supplied
// choice of sign for the `EdwardsPoint`.
// * `sign`: a `u8` denoting the desired sign of the resulting
// `EdwardsPoint`. `0` denotes positive and `1` negative.
// Returns nil when u is the exceptional Montgomery coordinate u == -1
// or when the derived y-encoding fails to decompress.
func toEdwards(u *ed.FieldElement, sign byte) *PointEd25519 {
	one := new(ed.FieldElement).SetOne()
	// To decompress the Montgomery u coordinate to an
	// `EdwardsPoint`, we apply the birational map to obtain the
	// Edwards y coordinate, then do Edwards decompression.
	//
	// The birational map is y = (u-1)/(u+1).
	//
	// The exceptional points are the zeros of the denominator,
	// i.e., u = -1.
	//
	// But when u = -1, v^2 = u*(u^2+486662*u+1) = 486660.
	//
	// Since this is nonsquare mod p, u = -1 corresponds to a point
	// on the twist, not the curve, so we can reject it early.
	if u.Equals(new(ed.FieldElement).Neg(one)) {
		return nil
	}
	// y = (u-1)/(u+1)
	yLhs := new(ed.FieldElement).Sub(u, one)
	yRhs := new(ed.FieldElement).Add(u, one)
	yInv := new(ed.FieldElement).Inverse(yRhs)
	y := new(ed.FieldElement).Mul(yLhs, yInv)
	yBytes := y.Bytes()
	// Graft the requested sign onto the high bit of the compressed form.
	yBytes[31] ^= sign << 7
	pt, err := edwards25519.NewIdentityPoint().SetBytes(yBytes[:])
	if err != nil {
		return nil
	}
	// Multiply by the cofactor so the result lands in the prime-order
	// subgroup.
	pt.MultByCofactor(pt)
	return &PointEd25519{value: pt}
}
// Perform the Elligator2 mapping to a Montgomery point encoded as a 32 byte value
//
// See <https://tools.ietf.org/html/draft-irtf-cfrg-hash-to-curve-11#section-6.7.1>
func elligatorEncode(r0 *ed.FieldElement) *ed.FieldElement {
	// montgomeryA is the curve25519 Montgomery coefficient A = 486662.
	montgomeryA := &ed.FieldElement{
		486662, 0, 0, 0, 0,
	}
	// montgomeryANeg is equal to -486662.
	montgomeryANeg := &ed.FieldElement{2251799813198567,
		2251799813685247,
		2251799813685247,
		2251799813685247,
		2251799813685247}
	t := new(ed.FieldElement)
	one := new(ed.FieldElement).SetOne()
	// d1 = 1 + 2r^2
	d1 := new(ed.FieldElement).Add(one, t.DoubledSquare(r0))
	// d = -A/(1+2r^2)
	d := new(ed.FieldElement).Mul(montgomeryANeg, t.Inverse(d1))
	dsq := new(ed.FieldElement).Square(d)
	au := new(ed.FieldElement).Mul(montgomeryA, d)
	inner := new(ed.FieldElement).Add(dsq, au)
	inner.Add(inner, one)
	// eps = d^3 + Ad^2 + d
	eps := new(ed.FieldElement).Mul(d, inner)
	// Only the squareness of eps matters; the root itself is discarded.
	_, wasSquare := sqrtRatio(eps, one)
	zero := new(ed.FieldElement).SetZero()
	aTemp := new(ed.FieldElement).SetZero()
	// 0 or A if non-square
	cselect(aTemp, zero, montgomeryA, wasSquare)
	// d, or d+A if non-square
	u := new(ed.FieldElement).Add(d, aTemp)
	// d or -d-A if non-square
	cselect(u, u, new(ed.FieldElement).Neg(u), wasSquare)
	return u
}

View File

@ -0,0 +1,403 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
crand "crypto/rand"
"encoding/hex"
"math/big"
"testing"
ed "filippo.io/edwards25519"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
)
// TestScalarEd25519Random pins the scalar produced from the
// deterministic test RNG and smoke-tests scalars from crypto/rand.
func TestScalarEd25519Random(t *testing.T) {
	ed25519 := ED25519()
	sc := ed25519.Scalar.Random(testRng())
	s, ok := sc.(*ScalarEd25519)
	require.True(t, ok)
	expected := toRSc("feaa6a9d6dda758da6145f7d411a3af9f8a120698e0093faa97085b384c3f00e")
	require.Equal(t, s.value.Equal(expected), 1)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc := ed25519.Scalar.Random(crand.Reader)
		_, ok := sc.(*ScalarEd25519)
		require.True(t, ok)
		require.True(t, !sc.IsZero())
	}
}

// TestScalarEd25519Hash pins the hash-to-scalar output for an all-zero
// 32-byte input.
func TestScalarEd25519Hash(t *testing.T) {
	var b [32]byte
	ed25519 := ED25519()
	sc := ed25519.Scalar.Hash(b[:])
	s, ok := sc.(*ScalarEd25519)
	require.True(t, ok)
	expected := toRSc("9d574494a02d72f5ff311cf0fb844d0fdd6103b17255274e029bdeed7207d409")
	require.Equal(t, s.value.Equal(expected), 1)
}

// TestScalarEd25519Zero: the zero scalar is zero and even.
func TestScalarEd25519Zero(t *testing.T) {
	ed25519 := ED25519()
	sc := ed25519.Scalar.Zero()
	require.True(t, sc.IsZero())
	require.True(t, sc.IsEven())
}

// TestScalarEd25519One: the one scalar is one and odd.
func TestScalarEd25519One(t *testing.T) {
	ed25519 := ED25519()
	sc := ed25519.Scalar.One()
	require.True(t, sc.IsOne())
	require.True(t, sc.IsOdd())
}

// TestScalarEd25519New checks parity of small positive and negative
// scalars (negatives wrap mod the odd group order, flipping parity).
func TestScalarEd25519New(t *testing.T) {
	ed25519 := ED25519()
	three := ed25519.Scalar.New(3)
	require.True(t, three.IsOdd())
	four := ed25519.Scalar.New(4)
	require.True(t, four.IsEven())
	neg1 := ed25519.Scalar.New(-1)
	require.True(t, neg1.IsEven())
	neg2 := ed25519.Scalar.New(-2)
	require.True(t, neg2.IsOdd())
}
// TestScalarEd25519Square: 3 squared is 9.
func TestScalarEd25519Square(t *testing.T) {
	ed25519 := ED25519()
	three := ed25519.Scalar.New(3)
	nine := ed25519.Scalar.New(9)
	require.Equal(t, three.Square().Cmp(nine), 0)
}

// TestScalarEd25519Cube: 3 cubed is 27.
func TestScalarEd25519Cube(t *testing.T) {
	ed25519 := ED25519()
	three := ed25519.Scalar.New(3)
	twentySeven := ed25519.Scalar.New(27)
	require.Equal(t, three.Cube().Cmp(twentySeven), 0)
}

// TestScalarEd25519Double: 3 doubled is 6.
func TestScalarEd25519Double(t *testing.T) {
	ed25519 := ED25519()
	three := ed25519.Scalar.New(3)
	six := ed25519.Scalar.New(6)
	require.Equal(t, three.Double().Cmp(six), 0)
}

// TestScalarEd25519Neg checks negation of one and of a larger value.
func TestScalarEd25519Neg(t *testing.T) {
	ed25519 := ED25519()
	one := ed25519.Scalar.One()
	neg1 := ed25519.Scalar.New(-1)
	require.Equal(t, one.Neg().Cmp(neg1), 0)
	lotsOfThrees := ed25519.Scalar.New(333333)
	expected := ed25519.Scalar.New(-333333)
	require.Equal(t, lotsOfThrees.Neg().Cmp(expected), 0)
}

// TestScalarEd25519Invert pins the inverse of 9 mod l against a known
// vector.
func TestScalarEd25519Invert(t *testing.T) {
	ed25519 := ED25519()
	nine := ed25519.Scalar.New(9)
	actual, _ := nine.Invert()
	sa, _ := actual.(*ScalarEd25519)
	expected := toRSc("c3d9c4db0516043013b1e1ce8637dc92e3388ee3388ee3388ee3388ee3388e03")
	require.Equal(t, sa.value.Equal(expected), 1)
}

// TestScalarEd25519Sqrt: a square root of 9 is 3.
func TestScalarEd25519Sqrt(t *testing.T) {
	ed25519 := ED25519()
	nine := ed25519.Scalar.New(9)
	actual, err := nine.Sqrt()
	sa, _ := actual.(*ScalarEd25519)
	expected := toRSc("03")
	require.NoError(t, err)
	require.Equal(t, sa.value.Equal(expected), 1)
}
// TestScalarEd25519Add checks addition, including wraparound through a
// negative operand.
func TestScalarEd25519Add(t *testing.T) {
	ed25519 := ED25519()
	nine := ed25519.Scalar.New(9)
	six := ed25519.Scalar.New(6)
	fifteen := nine.Add(six)
	require.NotNil(t, fifteen)
	expected := ed25519.Scalar.New(15)
	require.Equal(t, expected.Cmp(fifteen), 0)
	upper := ed25519.Scalar.New(-3)
	actual := upper.Add(nine)
	require.NotNil(t, actual)
	require.Equal(t, actual.Cmp(six), 0)
}

// TestScalarEd25519Sub checks subtraction in both directions.
func TestScalarEd25519Sub(t *testing.T) {
	ed25519 := ED25519()
	nine := ed25519.Scalar.New(9)
	six := ed25519.Scalar.New(6)
	expected := ed25519.Scalar.New(-3)
	actual := six.Sub(nine)
	require.Equal(t, expected.Cmp(actual), 0)
	actual = nine.Sub(six)
	require.Equal(t, actual.Cmp(ed25519.Scalar.New(3)), 0)
}

// TestScalarEd25519Mul checks multiplication, including (-1)*(-1) == 1.
func TestScalarEd25519Mul(t *testing.T) {
	ed25519 := ED25519()
	nine := ed25519.Scalar.New(9)
	six := ed25519.Scalar.New(6)
	actual := nine.Mul(six)
	require.Equal(t, actual.Cmp(ed25519.Scalar.New(54)), 0)
	upper := ed25519.Scalar.New(-1)
	require.Equal(t, upper.Mul(upper).Cmp(ed25519.Scalar.New(1)), 0)
}

// TestScalarEd25519Div checks division: x/x == 1 and 54/9 == 6.
func TestScalarEd25519Div(t *testing.T) {
	ed25519 := ED25519()
	nine := ed25519.Scalar.New(9)
	actual := nine.Div(nine)
	require.Equal(t, actual.Cmp(ed25519.Scalar.New(1)), 0)
	require.Equal(t, ed25519.Scalar.New(54).Div(nine).Cmp(ed25519.Scalar.New(6)), 0)
}
// TestScalarEd25519Serialize round-trips Bytes/SetBytes and pins the
// little-endian layout for the value 255.
func TestScalarEd25519Serialize(t *testing.T) {
	ed25519 := ED25519()
	sc := ed25519.Scalar.New(255)
	sequence := sc.Bytes()
	require.Equal(t, len(sequence), 32)
	require.Equal(t, sequence, []byte{0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0})
	ret, err := ed25519.Scalar.SetBytes(sequence)
	require.NoError(t, err)
	require.Equal(t, ret.Cmp(sc), 0)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc = ed25519.Scalar.Random(crand.Reader)
		sequence = sc.Bytes()
		require.Equal(t, len(sequence), 32)
		ret, err = ed25519.Scalar.SetBytes(sequence)
		require.NoError(t, err)
		require.Equal(t, ret.Cmp(sc), 0)
	}
}

// TestScalarEd25519Nil verifies the nil/type-mismatch behavior of the
// scalar API (nil results and the -2 Cmp sentinel).
func TestScalarEd25519Nil(t *testing.T) {
	ed25519 := ED25519()
	one := ed25519.Scalar.New(1)
	require.Nil(t, one.Add(nil))
	require.Nil(t, one.Sub(nil))
	require.Nil(t, one.Mul(nil))
	require.Nil(t, one.Div(nil))
	require.Nil(t, ed25519.Scalar.Random(nil))
	require.Equal(t, one.Cmp(nil), -2)
	_, err := ed25519.Scalar.SetBigInt(nil)
	require.Error(t, err)
}
// TestPointEd25519Random pins the point produced from the deterministic
// test RNG and checks that random points decode as valid curve points.
func TestPointEd25519Random(t *testing.T) {
	ed25519 := ED25519()
	sc := ed25519.Point.Random(testRng())
	s, ok := sc.(*PointEd25519)
	require.True(t, ok)
	expected := toRPt("6011540c6231421a70ced5f577432531f198d318facfaad6e52cc42fba6e6fc5")
	require.True(t, s.Equal(&PointEd25519{expected}))
	// Try 25 random values
	for i := 0; i < 25; i++ {
		sc := ed25519.Point.Random(crand.Reader)
		_, ok := sc.(*PointEd25519)
		require.True(t, ok)
		require.True(t, !sc.IsIdentity())
		pBytes := sc.ToAffineCompressed()
		_, err := ed.NewIdentityPoint().SetBytes(pBytes)
		require.NoError(t, err)
	}
}

// TestPointEd25519Hash pins hash-to-curve for the all-zero input and
// fuzzes random inputs for non-nil results.
func TestPointEd25519Hash(t *testing.T) {
	var b [32]byte
	ed25519 := ED25519()
	sc := ed25519.Point.Hash(b[:])
	s, ok := sc.(*PointEd25519)
	require.True(t, ok)
	expected := toRPt("b4d75c3bb03ca644ab6c6d2a955c911003d8cfa719415de93a6b85eeb0c8dd97")
	require.True(t, s.Equal(&PointEd25519{expected}))
	// Fuzz test
	for i := 0; i < 25; i++ {
		_, _ = crand.Read(b[:])
		sc = ed25519.Point.Hash(b[:])
		require.NotNil(t, sc)
	}
}

// TestPointEd25519Identity pins the compressed encoding of the identity
// point.
func TestPointEd25519Identity(t *testing.T) {
	ed25519 := ED25519()
	sc := ed25519.Point.Identity()
	require.True(t, sc.IsIdentity())
	require.Equal(t, sc.ToAffineCompressed(), []byte{1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
}

// TestPointEd25519Generator pins the compressed encoding of the base
// point.
func TestPointEd25519Generator(t *testing.T) {
	ed25519 := ED25519()
	sc := ed25519.Point.Generator()
	s, ok := sc.(*PointEd25519)
	require.True(t, ok)
	require.Equal(t, s.ToAffineCompressed(), []byte{0x58, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66, 0x66})
}
// TestPointEd25519Set covers the identity mapping and a valid (x, y)
// pair (the base point's affine coordinates, little-endian in hex).
func TestPointEd25519Set(t *testing.T) {
	ed25519 := ED25519()
	iden, err := ed25519.Point.Set(big.NewInt(0), big.NewInt(0))
	require.NoError(t, err)
	require.True(t, iden.IsIdentity())
	xBytes, _ := hex.DecodeString("1ad5258f602d56c9b2a7259560c72c695cdcd6fd31e2a4c0fe536ecdd3366921")
	yBytes, _ := hex.DecodeString("5866666666666666666666666666666666666666666666666666666666666666")
	x := new(big.Int).SetBytes(internal.ReverseScalarBytes(xBytes))
	y := new(big.Int).SetBytes(internal.ReverseScalarBytes(yBytes))
	newPoint, err := ed25519.Point.Set(x, y)
	require.NoError(t, err)
	require.NotEqualf(t, iden, newPoint, "after setting valid x and y, the point should NOT be identity point")
	emptyX := new(big.Int).SetBytes(internal.ReverseScalarBytes([]byte{}))
	identityPoint, err := ed25519.Point.Set(emptyX, y)
	require.NoError(t, err)
	require.Equalf(t, iden, identityPoint, "When x is empty, the point will be identity")
}

// TestPointEd25519Double: doubling matches multiplication by 2; the
// identity doubles to itself.
func TestPointEd25519Double(t *testing.T) {
	ed25519 := ED25519()
	g := ed25519.Point.Generator()
	g2 := g.Double()
	require.True(t, g2.Equal(g.Mul(ed25519.Scalar.New(2))))
	i := ed25519.Point.Identity()
	require.True(t, i.Double().Equal(i))
}

// TestPointEd25519Neg: double negation is the identity map.
func TestPointEd25519Neg(t *testing.T) {
	ed25519 := ED25519()
	g := ed25519.Point.Generator().Neg()
	require.True(t, g.Neg().Equal(ed25519.Point.Generator()))
	require.True(t, ed25519.Point.Identity().Neg().Equal(ed25519.Point.Identity()))
}

// TestPointEd25519Add: P+P == 2P and 3P == P+P+P.
func TestPointEd25519Add(t *testing.T) {
	ed25519 := ED25519()
	pt := ed25519.Point.Generator()
	require.True(t, pt.Add(pt).Equal(pt.Double()))
	require.True(t, pt.Mul(ed25519.Scalar.New(3)).Equal(pt.Add(pt).Add(pt)))
}

// TestPointEd25519Sub: 4G minus G three times is G; four times is the
// identity.
func TestPointEd25519Sub(t *testing.T) {
	ed25519 := ED25519()
	g := ed25519.Point.Generator()
	pt := ed25519.Point.Generator().Mul(ed25519.Scalar.New(4))
	require.True(t, pt.Sub(g).Sub(g).Sub(g).Equal(g))
	require.True(t, pt.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity())
}

// TestPointEd25519Mul: 4G equals G doubled twice.
func TestPointEd25519Mul(t *testing.T) {
	ed25519 := ED25519()
	g := ed25519.Point.Generator()
	pt := ed25519.Point.Generator().Mul(ed25519.Scalar.New(4))
	require.True(t, g.Double().Double().Equal(pt))
}
// TestPointEd25519Serialize pins the compressed/uncompressed encodings
// of a fixed scalar multiple of G and round-trips random points through
// both encodings.
func TestPointEd25519Serialize(t *testing.T) {
	ed25519 := ED25519()
	ss := ed25519.Scalar.Random(testRng())
	g := ed25519.Point.Generator()
	ppt := g.Mul(ss)
	expectedC := []byte{0x7f, 0x5b, 0xa, 0xd9, 0xb8, 0xce, 0xb7, 0x7, 0x4c, 0x10, 0xc8, 0xb4, 0x27, 0xe8, 0xd2, 0x28, 0x50, 0x42, 0x6c, 0x0, 0x8a, 0x3, 0x72, 0x2b, 0x7c, 0x3c, 0x37, 0x6f, 0xf8, 0x8f, 0x42, 0x5d}
	expectedU := []byte{0x70, 0xad, 0x4, 0xa1, 0x6, 0x8, 0x9f, 0x47, 0xe1, 0xe8, 0x9b, 0x9c, 0x81, 0x5a, 0xfb, 0xb9, 0x85, 0x6a, 0x2c, 0xa, 0xbc, 0xff, 0xe, 0xc6, 0xa0, 0xb0, 0xac, 0x75, 0xc, 0xd8, 0x59, 0x53, 0x7f, 0x5b, 0xa, 0xd9, 0xb8, 0xce, 0xb7, 0x7, 0x4c, 0x10, 0xc8, 0xb4, 0x27, 0xe8, 0xd2, 0x28, 0x50, 0x42, 0x6c, 0x0, 0x8a, 0x3, 0x72, 0x2b, 0x7c, 0x3c, 0x37, 0x6f, 0xf8, 0x8f, 0x42, 0x5d}
	require.Equal(t, ppt.ToAffineCompressed(), expectedC)
	require.Equal(t, ppt.ToAffineUncompressed(), expectedU)
	retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed())
	require.NoError(t, err)
	require.True(t, ppt.Equal(retP))
	retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed())
	require.NoError(t, err)
	require.True(t, ppt.Equal(retP))
	// smoke test
	for i := 0; i < 25; i++ {
		s := ed25519.Scalar.Random(crand.Reader)
		pt := g.Mul(s)
		cmprs := pt.ToAffineCompressed()
		require.Equal(t, len(cmprs), 32)
		retC, err := pt.FromAffineCompressed(cmprs)
		require.NoError(t, err)
		require.True(t, pt.Equal(retC))
		un := pt.ToAffineUncompressed()
		require.Equal(t, len(un), 64)
		retU, err := pt.FromAffineUncompressed(un)
		require.NoError(t, err)
		require.True(t, pt.Equal(retU))
	}
}

// TestPointEd25519Nil verifies nil-argument handling of the point API.
func TestPointEd25519Nil(t *testing.T) {
	ed25519 := ED25519()
	one := ed25519.Point.Generator()
	require.Nil(t, one.Add(nil))
	require.Nil(t, one.Sub(nil))
	require.Nil(t, one.Mul(nil))
	require.Nil(t, ed25519.Scalar.Random(nil))
	require.False(t, one.Equal(nil))
	_, err := ed25519.Scalar.SetBigInt(nil)
	require.Error(t, err)
}
// TestPointEd25519SumOfProducts: the sum of k*G for k = 8..12 equals
// 50*G.
func TestPointEd25519SumOfProducts(t *testing.T) {
	lhs := new(PointEd25519).Generator().Mul(new(ScalarEd25519).New(50))
	points := make([]Point, 5)
	for i := range points {
		points[i] = new(PointEd25519).Generator()
	}
	scalars := []Scalar{
		new(ScalarEd25519).New(8),
		new(ScalarEd25519).New(9),
		new(ScalarEd25519).New(10),
		new(ScalarEd25519).New(11),
		new(ScalarEd25519).New(12),
	}
	rhs := lhs.SumOfProducts(points, scalars)
	require.NotNil(t, rhs)
	require.True(t, lhs.Equal(rhs))
}

// TestPointEd25519VarTimeDoubleScalarBaseMult: a*H + b*G computed in
// variable time matches the same value computed with Mul/Add.
func TestPointEd25519VarTimeDoubleScalarBaseMult(t *testing.T) {
	curve := ED25519()
	h := curve.Point.Hash([]byte("TestPointEd25519VarTimeDoubleScalarBaseMult"))
	a := curve.Scalar.New(23)
	b := curve.Scalar.New(77)
	H, ok := h.(*PointEd25519)
	require.True(t, ok)
	rhs := H.VarTimeDoubleScalarBaseMult(a, H, b)
	lhs := h.Mul(a).Add(curve.Point.Generator().Mul(b))
	require.True(t, lhs.Equal(rhs))
}
// toRSc decodes a little-endian hex string into an edwards25519 scalar,
// zero-padding short inputs to 32 bytes.
func toRSc(hx string) *ed.Scalar {
	raw, _ := hex.DecodeString(hx)
	var buf [32]byte
	copy(buf[:], raw)
	sc, _ := new(ed.Scalar).SetCanonicalBytes(buf[:])
	return sc
}

// toRPt decodes a compressed-point hex string into an edwards25519
// point, zero-padding short inputs to 32 bytes.
func toRPt(hx string) *ed.Point {
	raw, _ := hex.DecodeString(hx)
	var buf [32]byte
	copy(buf[:], raw)
	pt, _ := new(PointEd25519).FromAffineCompressed(buf[:])
	return pt.(*PointEd25519).value
}

View File

@ -0,0 +1,684 @@
//
// Copyright Quilibrium, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/subtle"
"errors"
"fmt"
"io"
"math/big"
"github.com/cloudflare/circl/ecc/goldilocks"
"github.com/cloudflare/circl/math/fp448"
"golang.org/x/crypto/sha3"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
)
// ScalarEd448 is a scalar of the ed448 group, backed by a CIRCL
// goldilocks.Scalar.
type ScalarEd448 struct {
	value *goldilocks.Scalar
}

// PointEd448 is a point on the ed448 (Goldilocks) curve, backed by a
// CIRCL goldilocks.Point.
type PointEd448 struct {
	value *goldilocks.Point
}

// gscOne is the little-endian byte encoding of the scalar 1.
var gscOne = goldilocks.Scalar{
	1, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
}

// ed448Order is the little-endian byte encoding of the ed448 group
// order, used for big.Int arithmetic in Invert and SetBigInt.
var ed448Order = goldilocks.Scalar{
	0xf3, 0x44, 0x58, 0xab, 0x92, 0xc2, 0x78, 0x23,
	0x55, 0x8f, 0xc5, 0x8d, 0x72, 0xc2, 0x6c, 0x21,
	0x90, 0x36, 0xd6, 0xae, 0x49, 0xdb, 0x4e, 0xc4,
	0xe9, 0x23, 0xca, 0x7c, 0xff, 0xff, 0xff, 0xff,
	0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
	0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
	0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x3f,
}
// Random derives a pseudo-random scalar by hashing 57 bytes drawn from
// reader. It returns nil when reader is nil.
func (s *ScalarEd448) Random(reader io.Reader) Scalar {
	if reader == nil {
		return nil
	}
	var seed [57]byte
	// Best-effort read: a short read only reduces seed entropy.
	_, _ = reader.Read(seed[:])
	return s.Hash(seed[:])
}
// Hash maps arbitrary bytes onto a scalar by expanding them with
// SHAKE256 and applying the ed448 bit-pruning (clamping) pattern to the
// first 57 bytes before reduction.
func (s *ScalarEd448) Hash(bytes []byte) Scalar {
	raw := [114]byte{}
	h := sha3.NewShake256()
	_, _ = h.Write(bytes)
	_, _ = h.Read(raw[:])
	value := &goldilocks.Scalar{}
	// Clamp: clear the low 2 bits, set the high bit of byte 55, and
	// zero byte 56 (the ed448 pruning from RFC 8032).
	raw[0] &= 0xFC
	raw[55] |= 0x80
	raw[56] = 0x00
	value.FromBytes(raw[:57])
	return &ScalarEd448{value}
}
// Zero returns the scalar additive identity.
func (s *ScalarEd448) Zero() Scalar {
	return &ScalarEd448{value: &goldilocks.Scalar{}}
}

// One returns the scalar multiplicative identity.
func (s *ScalarEd448) One() Scalar {
	one := &goldilocks.Scalar{}
	one.FromBytes(gscOne[:])
	return &ScalarEd448{value: one}
}

// IsZero reports whether every byte of the scalar is zero.
// Bytes are OR-folded so the check does not branch on data.
func (s *ScalarEd448) IsZero() bool {
	acc := byte(0)
	for _, b := range s.value {
		acc |= b
	}
	return acc == 0
}

// IsOne reports whether the scalar encodes the value 1.
func (s *ScalarEd448) IsOne() bool {
	data := s.value
	acc := byte(0)
	for j := 1; j < len(data); j++ {
		acc |= data[j]
	}
	return acc == 0 && data[0] == 1
}

// IsOdd reports whether the low bit of the little-endian encoding is set.
func (s *ScalarEd448) IsOdd() bool {
	return s.value[0]&1 == 1
}

// IsEven reports whether the low bit of the little-endian encoding is
// clear.
func (s *ScalarEd448) IsEven() bool {
	return s.value[0]&1 == 0
}
// New returns a scalar holding the small integer value of input.
// Negative inputs are mapped to the corresponding negated scalar.
func (s *ScalarEd448) New(input int) Scalar {
	abs := input
	if input < 0 {
		abs = -input
	}
	// Encode |input| little-endian into the 56-byte scalar form.
	var data [56]byte
	data[0] = byte(abs)
	data[1] = byte(abs >> 8)
	data[2] = byte(abs >> 16)
	data[3] = byte(abs >> 24)
	value := &goldilocks.Scalar{}
	value.FromBytes(data[:])
	if input < 0 {
		value.Neg()
	}
	return &ScalarEd448{value: value}
}
// Cmp compares s and rhs for equality only: 0 when equal, -2 otherwise
// (including when rhs is nil or not a *ScalarEd448). No ordering is
// implied.
func (s *ScalarEd448) Cmp(rhs Scalar) int {
	// Sub returns nil when rhs has the wrong concrete type.
	r := s.Sub(rhs)
	if r != nil && r.IsZero() {
		return 0
	} else {
		return -2
	}
}
// Square returns s*s mod the group order.
func (s *ScalarEd448) Square() Scalar {
	sq := &goldilocks.Scalar{}
	sq.Mul(s.value, s.value)
	return &ScalarEd448{value: sq}
}

// Double returns 2*s mod the group order.
func (s *ScalarEd448) Double() Scalar {
	dbl := &goldilocks.Scalar{}
	dbl.Add(s.value, s.value)
	return &ScalarEd448{value: dbl}
}
// Invert returns the multiplicative inverse of s mod the ed448 group
// order via big.Int arithmetic. It returns an error when s has no
// inverse (s == 0) instead of dereferencing the nil value that
// big.Int.ModInverse yields in that case (the original code panicked).
func (s *ScalarEd448) Invert() (Scalar, error) {
	// Both the scalar and the order are stored little-endian; reverse
	// them into big-endian for big.Int.
	x := new(big.Int).SetBytes(internal.ReverseScalarBytes(s.value[:]))
	order := new(big.Int).SetBytes(internal.ReverseScalarBytes(ed448Order[:]))
	// ModInverse returns nil when gcd(x, order) != 1.
	inv := new(big.Int).ModInverse(x, order)
	if inv == nil {
		return nil, fmt.Errorf("scalar has no inverse")
	}
	value := &goldilocks.Scalar{}
	value.FromBytes(internal.ReverseScalarBytes(inv.Bytes()))
	return &ScalarEd448{value}, nil
}
// Sqrt is not implemented for this scalar field and always returns an
// error.
func (s *ScalarEd448) Sqrt() (Scalar, error) {
	return nil, errors.New("not supported")
}
// Cube returns s³ as a freshly allocated scalar.
func (s *ScalarEd448) Cube() Scalar {
	out := new(goldilocks.Scalar)
	out.Mul(s.value, s.value)
	out.Mul(out, s.value)
	return &ScalarEd448{value: out}
}
// Add returns s + rhs, or nil when rhs is not a *ScalarEd448.
func (s *ScalarEd448) Add(rhs Scalar) Scalar {
	r, ok := rhs.(*ScalarEd448)
	if !ok {
		return nil
	}
	sum := new(goldilocks.Scalar)
	sum.Add(s.value, r.value)
	return &ScalarEd448{value: sum}
}
// Sub returns s - rhs, or nil when rhs is not a *ScalarEd448.
func (s *ScalarEd448) Sub(rhs Scalar) Scalar {
	r, ok := rhs.(*ScalarEd448)
	if !ok {
		return nil
	}
	diff := new(goldilocks.Scalar)
	diff.Sub(s.value, r.value)
	return &ScalarEd448{value: diff}
}
// Mul returns s · rhs, or nil when rhs is not a *ScalarEd448.
func (s *ScalarEd448) Mul(rhs Scalar) Scalar {
	r, ok := rhs.(*ScalarEd448)
	if !ok {
		return nil
	}
	prod := new(goldilocks.Scalar)
	prod.Mul(s.value, r.value)
	return &ScalarEd448{value: prod}
}
// MulAdd returns s·y + z, or nil when either argument is not a
// *ScalarEd448.
func (s *ScalarEd448) MulAdd(y, z Scalar) Scalar {
	yy, ok := y.(*ScalarEd448)
	if !ok {
		return nil
	}
	zz, ok := z.(*ScalarEd448)
	if !ok {
		return nil
	}
	value := &goldilocks.Scalar{}
	value.Mul(s.value, yy.value)
	value.Add(value, zz.value)
	return &ScalarEd448{value}
}
// Div returns s / rhs, computed as rhs⁻¹ · s. It returns nil when rhs is
// not a *ScalarEd448 or when rhs has no inverse.
func (s *ScalarEd448) Div(rhs Scalar) Scalar {
	r, ok := rhs.(*ScalarEd448)
	if ok {
		value, err := r.Invert()
		if err != nil {
			return nil
		}
		i, _ := value.(*ScalarEd448)
		// The inverse is freshly allocated by Invert, so mutating it in
		// place is safe.
		i.value.Mul(i.value, s.value)
		return &ScalarEd448{value: i.value}
	} else {
		return nil
	}
}
// Neg returns -s. The bytes are copied first so the receiver is not
// mutated by the in-place goldilocks Neg.
func (s *ScalarEd448) Neg() Scalar {
	value := &goldilocks.Scalar{}
	copy(value[:], s.value[:])
	value.Neg()
	return &ScalarEd448{value}
}
// SetBigInt reduces x modulo the Ed448 group order and returns the
// resulting scalar. A nil input yields an error.
func (s *ScalarEd448) SetBigInt(x *big.Int) (Scalar, error) {
	if x == nil {
		return nil, fmt.Errorf("invalid value")
	}
	order := new(big.Int)
	orderBuf := internal.ReverseScalarBytes(ed448Order[:])
	order.SetBytes(orderBuf)
	var v big.Int
	buf := v.Mod(x, order).Bytes()
	// Convert the big-endian big.Int bytes to little-endian, zero-padded
	// to the fixed 56-byte scalar width.
	var rBuf [56]byte
	copy(rBuf[:], internal.ReverseScalarBytes(buf))
	value := &goldilocks.Scalar{}
	value.FromBytes(rBuf[:])
	return &ScalarEd448{value}, nil
}
// BigInt returns the scalar as a big.Int (byte order reversed from the
// little-endian internal encoding).
func (s *ScalarEd448) BigInt() *big.Int {
	return new(big.Int).SetBytes(internal.ReverseScalarBytes(s.value[:]))
}
// Bytes returns the 56-byte little-endian encoding of the scalar.
func (s *ScalarEd448) Bytes() []byte {
	return s.value[:]
}
// SetBytes takes input a 56-byte long array and returns a Ed448 scalar.
// The input must be 56-byte long and must be a reduced bytes.
// NOTE(review): reducedness is not verified here beyond what
// goldilocks.Scalar.FromBytes enforces — confirm against its docs.
func (s *ScalarEd448) SetBytes(input []byte) (Scalar, error) {
	if len(input) != 56 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	value := &goldilocks.Scalar{}
	value.FromBytes(input[:])
	return &ScalarEd448{value}, nil
}
// SetBytesWide takes input a 56-byte long byte array, reduce it and return an
// Ed448 scalar. If bytes is not of the right length, it returns nil and an
// error
// NOTE(review): despite the "Wide" name this accepts the same 56-byte
// width as SetBytes (wide variants conventionally take a double-width
// input) — confirm whether callers expect 112/114-byte support.
func (s *ScalarEd448) SetBytesWide(bytes []byte) (Scalar, error) {
	if len(bytes) != 56 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	value := &goldilocks.Scalar{}
	value.FromBytes(bytes[:])
	return &ScalarEd448{value}, nil
}
// This function takes an input x and sets s = x, where x is a 56-byte
// little-endian encoding of s, then it returns the corresponding Ed448 scalar.
// If the input is not a canonical encoding of s, it returns nil and an error.
// NOTE(review): only the length is checked here; canonicity enforcement
// is delegated to SetBytes/FromBytes — verify that matches the contract.
func (s *ScalarEd448) SetBytesCanonical(bytes []byte) (Scalar, error) {
	return s.SetBytes(bytes)
}
// Point returns the identity point of the associated Ed448 group.
func (s *ScalarEd448) Point() Point {
	return new(PointEd448).Identity()
}
// Clone returns an independent copy of the scalar.
func (s *ScalarEd448) Clone() Scalar {
	dup := new(goldilocks.Scalar)
	dup.FromBytes(s.value[:])
	return &ScalarEd448{value: dup}
}
// MarshalBinary encodes the scalar via the shared scalarMarshalBinary
// helper.
func (s *ScalarEd448) MarshalBinary() ([]byte, error) {
	return scalarMarshalBinary(s)
}
// UnmarshalBinary decodes input via the shared scalarUnmarshalBinary
// helper and rejects results that are not *ScalarEd448.
func (s *ScalarEd448) UnmarshalBinary(input []byte) error {
	sc, err := scalarUnmarshalBinary(input)
	if err != nil {
		return err
	}
	ss, ok := sc.(*ScalarEd448)
	if !ok {
		return fmt.Errorf("invalid scalar")
	}
	s.value = ss.value
	return nil
}
// MarshalText encodes the scalar via the shared scalarMarshalText helper.
func (s *ScalarEd448) MarshalText() ([]byte, error) {
	return scalarMarshalText(s)
}
// UnmarshalText decodes input via the shared scalarUnmarshalText helper
// and rejects results that are not *ScalarEd448.
func (s *ScalarEd448) UnmarshalText(input []byte) error {
	sc, err := scalarUnmarshalText(input)
	if err != nil {
		return err
	}
	ss, ok := sc.(*ScalarEd448)
	if !ok {
		return fmt.Errorf("invalid scalar")
	}
	s.value = ss.value
	return nil
}
// MarshalJSON encodes the scalar via the shared scalarMarshalJson helper.
func (s *ScalarEd448) MarshalJSON() ([]byte, error) {
	return scalarMarshalJson(s)
}
// UnmarshalJSON decodes input via the shared scalarUnmarshalJson helper
// and rejects results that are not *ScalarEd448.
func (s *ScalarEd448) UnmarshalJSON(input []byte) error {
	sc, err := scalarUnmarshalJson(input)
	if err != nil {
		return err
	}
	S, ok := sc.(*ScalarEd448)
	if !ok {
		return fmt.Errorf("invalid type")
	}
	s.value = S.value
	return nil
}
// Random derives a point from 114 bytes drawn from reader, via Hash.
// The error from Read is deliberately ignored; a short read hashes a
// partially zero seed.
func (p *PointEd448) Random(reader io.Reader) Point {
	var seed [114]byte
	_, _ = reader.Read(seed[:])
	return p.Hash(seed[:])
}
// Hash maps a byte string to a point by deriving a clamped scalar from
// SHAKE-256 output (same clamping as ScalarEd448.Hash) and multiplying
// the base point by it.
// NOTE(review): because the result is [h]·G, its discrete log relative to
// the generator is known to anyone who can recompute h — this is not a
// hash-to-curve with unknown discrete log; confirm callers don't need
// that property.
func (p *PointEd448) Hash(bytes []byte) Point {
	hashBytes := make([]byte, 114)
	h := sha3.NewShake256()
	_, _ = h.Write(bytes)
	_, _ = h.Read(hashBytes[:])
	value := &goldilocks.Scalar{}
	// Ed448 clamping of the 57-byte prefix.
	hashBytes[0] &= 0xFC
	hashBytes[55] |= 0x80
	hashBytes[56] = 0x00
	value.FromBytes(hashBytes[:57])
	point := (goldilocks.Curve{}).ScalarBaseMult(value)
	return &PointEd448{value: point}
}
// Identity returns the neutral element of the group.
func (p *PointEd448) Identity() Point {
	return &PointEd448{
		value: (goldilocks.Curve{}).Identity(),
	}
}
// Generator returns the canonical Ed448 base point.
func (p *PointEd448) Generator() Point {
	return &PointEd448{
		value: (goldilocks.Curve{}).Generator(),
	}
}
// IsIdentity reports whether p is the neutral element.
func (p *PointEd448) IsIdentity() bool {
	return p.Equal(p.Identity())
}
// IsNegative always returns false.
// Negative points don't really exist in Ed448
func (p *PointEd448) IsNegative() bool {
	return false
}
// IsOnCurve reports whether p's compressed encoding decodes successfully;
// goldilocks' UnmarshalBinary is relied on to reject invalid encodings.
func (p *PointEd448) IsOnCurve() bool {
	err := (goldilocks.Curve{}).Identity().UnmarshalBinary(
		p.ToAffineCompressed(),
	)
	return err == nil
}
// Double returns 2·p. The receiver is cloned via a MarshalBinary/
// UnmarshalBinary round trip because goldilocks' Double mutates its
// receiver in place. Returns nil on a marshal/unmarshal failure.
func (p *PointEd448) Double() Point {
	value, err := p.value.MarshalBinary()
	if err != nil {
		return nil
	}
	clone := &goldilocks.Point{}
	if err := clone.UnmarshalBinary(value); err != nil {
		return nil
	}
	clone.Double()
	return &PointEd448{value: clone}
}
// Scalar returns the zero scalar of the matching scalar type.
func (p *PointEd448) Scalar() Scalar {
	return new(ScalarEd448).Zero()
}
// Neg returns -p, cloning the receiver first (binary round trip) so the
// in-place goldilocks Neg does not mutate p. Returns nil on a
// marshal/unmarshal failure.
func (p *PointEd448) Neg() Point {
	value, err := p.value.MarshalBinary()
	if err != nil {
		return nil
	}
	clone := &goldilocks.Point{}
	if err := clone.UnmarshalBinary(value); err != nil {
		return nil
	}
	clone.Neg()
	return &PointEd448{value: clone}
}
// Add returns p + rhs. The receiver is cloned (binary round trip)
// because goldilocks' Add mutates its receiver. Returns nil when rhs is
// nil or not a *PointEd448, or on a marshal/unmarshal failure.
func (p *PointEd448) Add(rhs Point) Point {
	if rhs == nil {
		return nil
	}
	r, ok := rhs.(*PointEd448)
	if ok {
		value, err := p.value.MarshalBinary()
		if err != nil {
			return nil
		}
		clone := &goldilocks.Point{}
		if err := clone.UnmarshalBinary(value); err != nil {
			return nil
		}
		clone.Add(r.value)
		return &PointEd448{value: clone}
	} else {
		return nil
	}
}
// Sub returns p - rhs, computed by cloning rhs (binary round trip),
// negating the clone in place, and adding p. Returns nil when rhs is nil
// or not a *PointEd448, or on a marshal/unmarshal failure.
func (p *PointEd448) Sub(rhs Point) Point {
	if rhs == nil {
		return nil
	}
	r, ok := rhs.(*PointEd448)
	if ok {
		value, err := r.value.MarshalBinary()
		if err != nil {
			return nil
		}
		clone := &goldilocks.Point{}
		if err := clone.UnmarshalBinary(value); err != nil {
			return nil
		}
		clone.Neg()
		clone.Add(p.value)
		return &PointEd448{value: clone}
	} else {
		return nil
	}
}
// Mul returns rhs·p (scalar multiplication). The receiver is cloned via
// a binary round trip before being passed to ScalarMult. Returns nil
// when rhs is nil or not a *ScalarEd448, or on a marshal/unmarshal
// failure.
func (p *PointEd448) Mul(rhs Scalar) Point {
	if rhs == nil {
		return nil
	}
	r, ok := rhs.(*ScalarEd448)
	if ok {
		value, err := p.value.MarshalBinary()
		if err != nil {
			return nil
		}
		clone := &goldilocks.Point{}
		if err := clone.UnmarshalBinary(value); err != nil {
			return nil
		}
		clone = (goldilocks.Curve{}).ScalarMult(r.value, clone)
		return &PointEd448{value: clone}
	} else {
		return nil
	}
}
// Equal reports whether rhs is a *PointEd448 representing the same group
// element as p.
func (p *PointEd448) Equal(rhs Point) bool {
	r, ok := rhs.(*PointEd448)
	return ok && p.value.IsEqual(r.value)
}
// Set builds a point from affine big-endian coordinates x and y. Both
// coordinates empty/zero yields the identity; otherwise the values are
// converted to little-endian field elements and validated by
// goldilocks.FromAffine, which errors for off-curve inputs.
func (p *PointEd448) Set(x, y *big.Int) (Point, error) {
	// check is identity
	xx := subtle.ConstantTimeCompare(x.Bytes(), []byte{})
	yy := subtle.ConstantTimeCompare(y.Bytes(), []byte{})
	if (xx | yy) == 1 {
		return p.Identity(), nil
	}
	xElem := &fp448.Elt{}
	yElem := &fp448.Elt{}
	copy(xElem[:], internal.ReverseScalarBytes(x.Bytes()))
	copy(yElem[:], internal.ReverseScalarBytes(y.Bytes()))
	point, err := goldilocks.FromAffine(xElem, yElem)
	if err != nil {
		return nil, err
	}
	return &PointEd448{value: point}, nil
}
// ToAffineCompressed returns the compressed (EdDSA-style) encoding of p,
// or nil when marshaling fails.
func (p *PointEd448) ToAffineCompressed() []byte {
	affineCompressed, err := p.value.MarshalBinary()
	if err != nil {
		return nil
	}
	return affineCompressed
}
// ToAffineUncompressed returns x||y as 112 bytes of little-endian affine
// coordinates.
func (p *PointEd448) ToAffineUncompressed() []byte {
	x, y := p.value.ToAffine()
	var out [112]byte
	copy(out[:56], x[:])
	copy(out[56:], y[:])
	return out[:]
}
// FromAffineCompressed decodes a compressed encoding as produced by
// ToAffineCompressed; goldilocks rejects invalid encodings with an error.
func (p *PointEd448) FromAffineCompressed(inBytes []byte) (Point, error) {
	pt := (&goldilocks.Point{})
	err := pt.UnmarshalBinary(inBytes)
	if err != nil {
		return nil, err
	}
	return &PointEd448{value: pt}, nil
}
// FromAffineUncompressed decodes the 112-byte x||y little-endian form;
// it errors on a wrong length or (via goldilocks.FromAffine) off-curve
// coordinates.
func (p *PointEd448) FromAffineUncompressed(inBytes []byte) (Point, error) {
	if len(inBytes) != 112 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	x := &fp448.Elt{}
	copy(x[:], inBytes[:56])
	y := &fp448.Elt{}
	copy(y[:], inBytes[56:])
	value, err := goldilocks.FromAffine(x, y)
	if err != nil {
		return nil, err
	}
	return &PointEd448{value}, nil
}
// CurveName returns the registered name of this curve (ED448Name).
func (p *PointEd448) CurveName() string {
	return ED448Name
}
// SumOfProducts computes Σ scalars[i]·points[i].
// Unfortunately the primitives don't have a multi-scalar mult
// implementation so we're left to do it the slow way, one scalar
// multiplication per term.
//
// Returns nil when the slice lengths differ (the previous implementation
// indexed past the shorter slice and panicked) or when any element is not
// the Ed448 concrete type.
func (p *PointEd448) SumOfProducts(points []Point, scalars []Scalar) Point {
	if len(points) != len(scalars) {
		return nil
	}
	accum := p.Identity().(*PointEd448)
	for i, pt := range points {
		pp, ok := pt.(*PointEd448)
		if !ok {
			return nil
		}
		sc, ok := scalars[i].(*ScalarEd448)
		if !ok {
			return nil
		}
		accum = accum.Add(pp.Mul(sc)).(*PointEd448)
	}
	return &PointEd448{value: accum.value}
}
// MarshalBinary encodes the point via the shared pointMarshalBinary
// helper.
func (p *PointEd448) MarshalBinary() ([]byte, error) {
	return pointMarshalBinary(p)
}
// UnmarshalBinary decodes input via the shared pointUnmarshalBinary
// helper and rejects results that are not *PointEd448.
func (p *PointEd448) UnmarshalBinary(input []byte) error {
	pt, err := pointUnmarshalBinary(input)
	if err != nil {
		return err
	}
	ppt, ok := pt.(*PointEd448)
	if !ok {
		return fmt.Errorf("invalid point")
	}
	p.value = ppt.value
	return nil
}
// MarshalText encodes the point via the shared pointMarshalText helper.
func (p *PointEd448) MarshalText() ([]byte, error) {
	return pointMarshalText(p)
}
// UnmarshalText decodes input via the shared pointUnmarshalText helper
// and rejects results that are not *PointEd448.
func (p *PointEd448) UnmarshalText(input []byte) error {
	pt, err := pointUnmarshalText(input)
	if err != nil {
		return err
	}
	ppt, ok := pt.(*PointEd448)
	if !ok {
		return fmt.Errorf("invalid point")
	}
	p.value = ppt.value
	return nil
}
// MarshalJSON encodes the point via the shared pointMarshalJson helper.
func (p *PointEd448) MarshalJSON() ([]byte, error) {
	return pointMarshalJson(p)
}
// UnmarshalJSON decodes input via the shared pointUnmarshalJson helper
// and rejects results that are not *PointEd448.
func (p *PointEd448) UnmarshalJSON(input []byte) error {
	pt, err := pointUnmarshalJson(input)
	if err != nil {
		return err
	}
	P, ok := pt.(*PointEd448)
	if !ok {
		return fmt.Errorf("invalid type")
	}
	p.value = P.value
	return nil
}

View File

@ -0,0 +1,432 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
crand "crypto/rand"
"encoding/hex"
"fmt"
"math/big"
"testing"
"github.com/cloudflare/circl/ecc/goldilocks"
"github.com/stretchr/testify/require"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
)
func TestScalarEd448Random(t *testing.T) {
ed448 := ED448()
sc := ed448.Scalar.Random(testRng())
s, ok := sc.(*ScalarEd448)
require.True(t, ok)
expected := toGSc("3e3d89d4531a059a5e4fc4ba87d3ef5c94d7ca133d6087648b5442d1e15ffa038eab7b739c1d2ff034b474b62a1fe40c3f81841c1e807f1c")
require.Equal(t, s.value[:], expected[:])
// Try 10 random values
for i := 0; i < 10; i++ {
sc := ed448.Scalar.Random(crand.Reader)
_, ok := sc.(*ScalarEd448)
require.True(t, ok)
require.True(t, !sc.IsZero())
}
}
func TestScalarEd448Hash(t *testing.T) {
var b [32]byte
ed448 := ED448()
sc := ed448.Scalar.Hash(b[:])
s, ok := sc.(*ScalarEd448)
require.True(t, ok)
expected := toGSc("0e0ecc2b5ecf781cc81c38024194380cf16d9afad07c98eb49b0835dd6ad3062221e124311ec7f7181568de7938df805d894f5fded465001")
require.Equal(t, s.value[:], expected[:])
}
func TestScalarEd448Zero(t *testing.T) {
ed448 := ED448()
sc := ed448.Scalar.Zero()
require.True(t, sc.IsZero())
require.True(t, sc.IsEven())
}
func TestScalarEd448One(t *testing.T) {
ed448 := ED448()
sc := ed448.Scalar.One()
require.True(t, sc.IsOne())
require.True(t, sc.IsOdd())
}
func TestScalarEd448New(t *testing.T) {
ed448 := ED448()
three := ed448.Scalar.New(3)
require.True(t, three.IsOdd())
four := ed448.Scalar.New(4)
require.True(t, four.IsEven())
neg1 := ed448.Scalar.New(-1)
require.True(t, neg1.IsEven())
neg2 := ed448.Scalar.New(-2)
require.True(t, neg2.IsOdd())
}
func TestScalarEd448Square(t *testing.T) {
ed448 := ED448()
three := ed448.Scalar.New(3)
nine := ed448.Scalar.New(9)
require.Equal(t, three.Square().Cmp(nine), 0)
}
func TestScalarEd448Cube(t *testing.T) {
ed448 := ED448()
three := ed448.Scalar.New(3)
twentySeven := ed448.Scalar.New(27)
require.Equal(t, three.Cube().Cmp(twentySeven), 0)
}
func TestScalarEd448Double(t *testing.T) {
ed448 := ED448()
three := ed448.Scalar.New(3)
six := ed448.Scalar.New(6)
require.Equal(t, three.Double().Cmp(six), 0)
}
func TestScalarEd448Neg(t *testing.T) {
ed448 := ED448()
one := ed448.Scalar.One()
neg1 := ed448.Scalar.New(-1)
require.Equal(t, one.Neg().Cmp(neg1), 0)
lotsOfThrees := ed448.Scalar.New(333333)
expected := ed448.Scalar.New(-333333)
require.Equal(t, lotsOfThrees.Neg().Cmp(expected), 0)
}
func TestScalarEd448Invert(t *testing.T) {
ed448 := ED448()
nine := ed448.Scalar.New(9)
actual, _ := nine.Invert()
sa, _ := actual.(*ScalarEd448)
expected := toGSc("c042bf42c31643f7d9dd346bb116e767a573937d0c08ba1710db5345e3388ee3388ee3388ee3388ee3388ee3388ee3388ee3388ee3388e23")
require.Equal(t, sa.value[:], expected[:])
require.Equal(t, nine.Mul(actual).(*ScalarEd448).value[:], ed448.Scalar.New(1).(*ScalarEd448).value[:])
}
func TestScalarEd448Add(t *testing.T) {
ed448 := ED448()
nine := ed448.Scalar.New(9)
six := ed448.Scalar.New(6)
fifteen := nine.Add(six)
require.NotNil(t, fifteen)
expected := ed448.Scalar.New(15)
require.Equal(t, expected.Cmp(fifteen), 0)
upper := ed448.Scalar.New(-3)
actual := upper.Add(nine)
require.NotNil(t, actual)
require.Equal(t, actual.Cmp(six), 0)
}
func TestScalarEd448Sub(t *testing.T) {
ed448 := ED448()
nine := ed448.Scalar.New(9)
six := ed448.Scalar.New(6)
expected := ed448.Scalar.New(-3)
actual := six.Sub(nine)
require.Equal(t, expected.Cmp(actual), 0)
actual = nine.Sub(six)
require.Equal(t, actual.Cmp(ed448.Scalar.New(3)), 0)
}
func TestScalarEd448Mul(t *testing.T) {
ed448 := ED448()
nine := ed448.Scalar.New(9)
six := ed448.Scalar.New(6)
actual := nine.Mul(six)
require.Equal(t, actual.Cmp(ed448.Scalar.New(54)), 0)
upper := ed448.Scalar.New(-1)
require.Equal(t, upper.Mul(upper).Cmp(ed448.Scalar.New(1)), 0)
}
func TestScalarEd448Div(t *testing.T) {
ed448 := ED448()
nine := ed448.Scalar.New(9)
actual := nine.Div(nine)
require.Equal(t, actual.Cmp(ed448.Scalar.New(1)), 0)
require.Equal(t, ed448.Scalar.New(54).Div(nine).Cmp(ed448.Scalar.New(6)), 0)
}
func TestScalarEd448Serialize(t *testing.T) {
ed448 := ED448()
sc := ed448.Scalar.New(255)
sequence := sc.Bytes()
require.Equal(t, len(sequence), 56)
require.Equal(t, sequence, []byte{
0xff, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
})
ret, err := ed448.Scalar.SetBytes(sequence)
require.NoError(t, err)
require.Equal(t, ret.Cmp(sc), 0)
// Try 10 random values
for i := 0; i < 10; i++ {
sc = ed448.Scalar.Random(crand.Reader)
sequence = sc.Bytes()
require.Equal(t, len(sequence), 56)
ret, err = ed448.Scalar.SetBytes(sequence)
require.NoError(t, err)
require.Equal(t, ret.Cmp(sc), 0)
}
}
func TestScalarEd448Nil(t *testing.T) {
ed448 := ED448()
one := ed448.Scalar.New(1)
require.Nil(t, one.Add(nil))
require.Nil(t, one.Sub(nil))
require.Nil(t, one.Mul(nil))
require.Nil(t, one.Div(nil))
require.Nil(t, ed448.Scalar.Random(nil))
require.Equal(t, one.Cmp(nil), -2)
_, err := ed448.Scalar.SetBigInt(nil)
require.Error(t, err)
}
// TestPointEd448Random checks Point.Random against a fixed-RNG vector and
// smoke-tests 25 random points decode as valid goldilocks points.
func TestPointEd448Random(t *testing.T) {
	ed448 := ED448()
	sc := ed448.Point.Random(testRng())
	s, ok := sc.(*PointEd448)
	require.True(t, ok)
	// NOTE(review): leftover debug output — prefer t.Logf here (and drop
	// the then-unused fmt import) so `go test` stays quiet by default.
	fmt.Println(hex.EncodeToString(s.ToAffineCompressed()))
	expected := toGPt("77ad569bd49c8a7228896c7e9c6a1af8f24912256f7fb0cce3de269932c5d64a3d2381bec8be6820a4ecfa4103d002ab8b5750b4beb1736400")
	require.True(t, s.Equal(&PointEd448{expected}))
	// Try 25 random values
	for i := 0; i < 25; i++ {
		sc := ed448.Point.Random(crand.Reader)
		_, ok := sc.(*PointEd448)
		require.True(t, ok)
		require.True(t, !sc.IsIdentity())
		pBytes := sc.ToAffineCompressed()
		_, err := goldilocks.FromBytes(pBytes)
		require.NoError(t, err)
	}
}
func TestPointEd448Hash(t *testing.T) {
var b [114]byte
ed448 := ED448()
sc := ed448.Point.Hash(b[:])
s, ok := sc.(*PointEd448)
require.True(t, ok)
expected := toGPt("65458b113e6a77dbdfd75726961167cce206ac30022caf9153fb4754301943d3c58a95332b8119240905a551e18310e8f0dfc66d3cd0cb7700")
require.True(t, s.Equal(&PointEd448{expected}))
// Fuzz test
for i := 0; i < 25; i++ {
_, _ = crand.Read(b[:])
sc = ed448.Point.Hash(b[:])
require.NotNil(t, sc)
}
}
func TestPointEd448Identity(t *testing.T) {
ed448 := ED448()
sc := ed448.Point.Identity()
require.True(t, sc.IsIdentity())
require.Equal(t, sc.ToAffineCompressed(), []byte{
0x1, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0})
}
func TestPointEd448Generator(t *testing.T) {
ed448 := ED448()
sc := ed448.Point.Generator()
s, ok := sc.(*PointEd448)
require.True(t, ok)
require.Equal(t, s.ToAffineCompressed(), []byte{
0x14, 0xfa, 0x30, 0xf2, 0x5b, 0x79, 0x08, 0x98,
0xad, 0xc8, 0xd7, 0x4e, 0x2c, 0x13, 0xbd, 0xfd,
0xc4, 0x39, 0x7c, 0xe6, 0x1c, 0xff, 0xd3, 0x3a,
0xd7, 0xc2, 0xa0, 0x05, 0x1e, 0x9c, 0x78, 0x87,
0x40, 0x98, 0xa3, 0x6c, 0x73, 0x73, 0xea, 0x4b,
0x62, 0xc7, 0xc9, 0x56, 0x37, 0x20, 0x76, 0x88,
0x24, 0xbc, 0xb6, 0x6e, 0x71, 0x46, 0x3f, 0x69,
0x0})
}
func TestPointEd448Set(t *testing.T) {
ed448 := ED448()
iden, err := ed448.Point.Set(big.NewInt(0), big.NewInt(0))
require.NoError(t, err)
require.True(t, iden.IsIdentity())
xBytes, _ := hex.DecodeString("a913f565d4ddd5560df211dcf06ffa25297cb3ce3ae4495f6dff0d6486e7d319bf2ce0ef040cafaf8a3a7d9bc6c91bd2492897d0dd1012a8")
yBytes, _ := hex.DecodeString("a9a0d4631f1cab9a00824d28670704b02f912470adbed436de82bc89b87b97c99e1ff55ae1afa8e377ced6a47ef6cd895f0b3588089fa1a6")
x := new(big.Int).SetBytes(internal.ReverseScalarBytes(xBytes))
y := new(big.Int).SetBytes(internal.ReverseScalarBytes(yBytes))
newPoint, err := ed448.Point.Set(x, y)
require.NoError(t, err)
require.NotEqualf(t, iden, newPoint, "after setting valid x and y, the point should NOT be identity point")
emptyX := new(big.Int).SetBytes(internal.ReverseScalarBytes([]byte{}))
identityPoint, err := ed448.Point.Set(emptyX, y)
require.NoError(t, err)
require.Equalf(t, iden, identityPoint, "When x is empty, the point will be identity")
}
func TestPointEd448Double(t *testing.T) {
ed448 := ED448()
g := ed448.Point.Generator()
g2 := g.Double()
require.True(t, g2.Equal(g.Mul(ed448.Scalar.New(2))))
i := ed448.Point.Identity()
require.True(t, i.Double().Equal(i))
}
func TestPointEd448Neg(t *testing.T) {
ed448 := ED448()
g := ed448.Point.Generator().Neg()
require.True(t, g.Neg().Equal(ed448.Point.Generator()))
require.True(t, ed448.Point.Identity().Neg().Equal(ed448.Point.Identity()))
}
func TestPointEd448Add(t *testing.T) {
ed448 := ED448()
pt := ed448.Point.Generator()
require.True(t, pt.Add(pt).Equal(pt.Double()))
require.True(t, pt.Mul(ed448.Scalar.New(3)).Equal(pt.Add(pt).Add(pt)))
}
func TestPointEd448Sub(t *testing.T) {
ed448 := ED448()
g := ed448.Point.Generator()
pt := ed448.Point.Generator().Mul(ed448.Scalar.New(4))
require.True(t, pt.Sub(g).Sub(g).Sub(g).Equal(g))
require.True(t, pt.Sub(g).Sub(g).Sub(g).Sub(g).IsIdentity())
}
func TestPointEd448Mul(t *testing.T) {
ed448 := ED448()
g := ed448.Point.Generator()
pt := ed448.Point.Generator().Mul(ed448.Scalar.New(4))
require.True(t, g.Double().Double().Equal(pt))
}
func TestPointEd448Serialize(t *testing.T) {
ed448 := ED448()
ss := ed448.Scalar.Random(testRng())
g := ed448.Point.Generator()
ppt := g.Mul(ss)
expectedC := []byte{
0xe0, 0x75, 0x8a, 0x33, 0x26, 0x79, 0x39, 0xa3,
0x94, 0xfb, 0x5c, 0xcb, 0x20, 0x2e, 0xe8, 0x51,
0xce, 0xbc, 0x2e, 0x89, 0xc9, 0x1a, 0xc1, 0x28,
0x9e, 0x2b, 0xfc, 0xdd, 0xfd, 0x9f, 0xf9, 0xfc,
0x56, 0x94, 0xb0, 0xf5, 0x69, 0xd7, 0xf7, 0xe9,
0xda, 0x16, 0xe1, 0xcd, 0xe9, 0x30, 0x1b, 0x29,
0xf4, 0x81, 0x28, 0xb3, 0xcb, 0xd1, 0x16, 0x85,
0x80,
}
expectedU := []byte{
0x95, 0xcd, 0x44, 0x60, 0xa4, 0x5d, 0x47, 0x87,
0x44, 0x71, 0x93, 0xd5, 0xc5, 0x38, 0xcb, 0x8b,
0xec, 0x3a, 0x86, 0xae, 0x1a, 0xba, 0xf9, 0x24,
0xa8, 0x4b, 0x25, 0x20, 0x47, 0x4c, 0xa1, 0x6c,
0xe0, 0x33, 0x8d, 0xaa, 0xda, 0x54, 0x1a, 0x57,
0x56, 0x86, 0x22, 0xc7, 0xbf, 0x24, 0x74, 0x7c,
0xed, 0xd3, 0x6a, 0xad, 0x08, 0xb7, 0x7e, 0xd8,
0xe0, 0x75, 0x8a, 0x33, 0x26, 0x79, 0x39, 0xa3,
0x94, 0xfb, 0x5c, 0xcb, 0x20, 0x2e, 0xe8, 0x51,
0xce, 0xbc, 0x2e, 0x89, 0xc9, 0x1a, 0xc1, 0x28,
0x9e, 0x2b, 0xfc, 0xdd, 0xfd, 0x9f, 0xf9, 0xfc,
0x56, 0x94, 0xb0, 0xf5, 0x69, 0xd7, 0xf7, 0xe9,
0xda, 0x16, 0xe1, 0xcd, 0xe9, 0x30, 0x1b, 0x29,
0xf4, 0x81, 0x28, 0xb3, 0xcb, 0xd1, 0x16, 0x85,
}
require.Equal(t, ppt.ToAffineCompressed(), expectedC)
require.Equal(t, ppt.ToAffineUncompressed(), expectedU)
retP, err := ppt.FromAffineCompressed(ppt.ToAffineCompressed())
require.NoError(t, err)
require.True(t, ppt.Equal(retP))
retP, err = ppt.FromAffineUncompressed(ppt.ToAffineUncompressed())
require.NoError(t, err)
require.True(t, ppt.Equal(retP))
// smoke test
for i := 0; i < 25; i++ {
s := ed448.Scalar.Random(crand.Reader)
pt := g.Mul(s)
cmprs := pt.ToAffineCompressed()
require.Equal(t, len(cmprs), 57)
retC, err := pt.FromAffineCompressed(cmprs)
require.NoError(t, err)
require.True(t, pt.Equal(retC))
un := pt.ToAffineUncompressed()
require.Equal(t, len(un), 112)
retU, err := pt.FromAffineUncompressed(un)
require.NoError(t, err)
require.True(t, pt.Equal(retU))
}
}
func TestPointEd448Nil(t *testing.T) {
ed448 := ED448()
one := ed448.Point.Generator()
require.Nil(t, one.Add(nil))
require.Nil(t, one.Sub(nil))
require.Nil(t, one.Mul(nil))
require.Nil(t, ed448.Scalar.Random(nil))
require.False(t, one.Equal(nil))
_, err := ed448.Scalar.SetBigInt(nil)
require.Error(t, err)
}
func TestPointEd448SumOfProducts(t *testing.T) {
lhs := new(PointEd448).Generator().Mul(new(ScalarEd448).New(50))
points := make([]Point, 5)
for i := range points {
points[i] = new(PointEd448).Generator()
}
scalars := []Scalar{
new(ScalarEd448).New(8),
new(ScalarEd448).New(9),
new(ScalarEd448).New(10),
new(ScalarEd448).New(11),
new(ScalarEd448).New(12),
}
rhs := lhs.SumOfProducts(points, scalars)
require.NotNil(t, rhs)
require.True(t, lhs.Equal(rhs))
}
// toGSc decodes a little-endian hex string into a goldilocks scalar;
// decode errors are ignored (test fixture helper for known-good vectors).
func toGSc(hx string) *goldilocks.Scalar {
	e, _ := hex.DecodeString(hx)
	var data [56]byte
	copy(data[:], e)
	value := &goldilocks.Scalar{}
	value.FromBytes(data[:])
	return value
}
// toGPt decodes a compressed-point hex string into a goldilocks point;
// errors are ignored (test fixture helper for known-good vectors).
func toGPt(hx string) *goldilocks.Point {
	e, _ := hex.DecodeString(hx)
	var data [57]byte
	copy(data[:], e)
	pt, _ := new(PointEd448).FromAffineCompressed(data[:])
	return pt.(*PointEd448).value
}

View File

@ -0,0 +1,280 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
// Package curves: Field implementation IS NOT constant time as it leverages math/big for big number operations.
package curves
import (
"crypto/rand"
"encoding/json"
"fmt"
"io"
"math/big"
"sync"
)
var ed25519SubGroupOrderOnce sync.Once
var ed25519SubGroupOrder *big.Int
// Field is a finite field.
type Field struct {
*big.Int
}
// Element is a group element within a finite field.
type Element struct {
Modulus *Field `json:"modulus"`
Value *big.Int `json:"value"`
}
// ElementJSON is used in JSON<>Element conversions.
// For years, big.Int hasn't properly supported JSON unmarshaling
// https://github.com/golang/go/issues/28154
type ElementJSON struct {
Modulus string `json:"modulus"`
Value string `json:"value"`
}
// Marshal Element to JSON. Modulus and value are emitted as base-10
// strings (see ElementJSON for why big.Int is not marshaled directly).
func (x *Element) MarshalJSON() ([]byte, error) {
	return json.Marshal(ElementJSON{
		Modulus: x.Modulus.String(),
		Value:   x.Value.String(),
	})
}
// UnmarshalJSON decodes the base-10 string form produced by MarshalJSON.
// NOTE(review): the Field is constructed directly, bypassing NewField's
// primality check — confirm untrusted JSON cannot smuggle in a composite
// modulus.
func (x *Element) UnmarshalJSON(bytes []byte) error {
	var e ElementJSON
	err := json.Unmarshal(bytes, &e)
	if err != nil {
		return err
	}
	// Convert the strings to big.Ints
	modulus, ok := new(big.Int).SetString(e.Modulus, 10)
	if !ok {
		return fmt.Errorf("failed to unmarshal modulus string '%v' to big.Int", e.Modulus)
	}
	x.Modulus = &Field{modulus}
	x.Value, ok = new(big.Int).SetString(e.Value, 10)
	if !ok {
		return fmt.Errorf("failed to unmarshal value string '%v' to big.Int", e.Value)
	}
	return nil
}
// The probability of returning true for a randomly chosen
// non-prime is at most ¼ⁿ. 64 is a widely used standard
// that is more than sufficient.
const millerRabinRounds = 64
// NewField is a constructor for a Field; it panics when modulus fails a
// Miller-Rabin primality test.
func NewField(modulus *big.Int) *Field {
	// For our purposes we never expect to be dealing with a non-prime field. This provides some protection against
	// accidentally doing that.
	if !modulus.ProbablyPrime(millerRabinRounds) {
		panic(fmt.Sprintf("modulus: %x is not a prime", modulus))
	}
	return &Field{modulus}
}
// newElement wraps value in the given field, panicking when value lies
// outside [0, modulus).
func newElement(field *Field, value *big.Int) *Element {
	if !field.IsValid(value) {
		panic(fmt.Sprintf("value: %x is not within field: %x", value, field))
	}
	return &Element{field, value}
}
// IsValid returns whether or not the value is within [0, modulus).
func (f Field) IsValid(value *big.Int) bool {
	return value.Sign() >= 0 && value.Cmp(f.Int) < 0
}
// NewElement wraps value in this field; panics for out-of-range values.
func (f Field) NewElement(value *big.Int) *Element {
	return newElement(&f, value)
}
// Zero returns this field's additive identity.
func (f Field) Zero() *Element {
	return newElement(&f, big.NewInt(0))
}
// One returns this field's multiplicative identity.
func (f Field) One() *Element {
	return newElement(&f, big.NewInt(1))
}
// RandomElement returns a uniformly random element of f read from r,
// falling back to crypto/rand.Reader when r is nil.
func (f Field) RandomElement(r io.Reader) (*Element, error) {
	if r == nil {
		r = rand.Reader
	}
	var randInt *big.Int
	var err error
	// Ed25519 needs to do special handling
	// in case the value is used in
	// Scalar multiplications with points
	if f.Int.Cmp(Ed25519Order()) == 0 {
		scalar := NewEd25519Scalar()
		randInt, err = scalar.RandomWithReader(r)
	} else {
		// Read a random integer within the field. This is defined as [0, max) so we don't need to
		// explicitly check it is within the field. If it is not, NewElement will panic anyways.
		randInt, err = rand.Int(r, f.Int)
	}
	if err != nil {
		return nil, err
	}
	return newElement(&f, randInt), nil
}
// ElementFromBytes initializes a new field element from big-endian bytes.
// No reduction is performed; out-of-range inputs panic in newElement.
func (f Field) ElementFromBytes(bytes []byte) *Element {
	return newElement(&f, new(big.Int).SetBytes(bytes))
}
// ReducedElementFromBytes initializes a new field element from big-endian bytes and reduces it by
// the modulus of the field.
//
// WARNING: If this is used with cryptographic constructions which rely on a uniform distribution of
// values, this may introduce a bias to the value of the returned field element. This happens when
// the integer range of the provided bytes is not an integer multiple of the field order.
//
// Assume we are working in field which a modulus of 3 and the range of the uniform random bytes we
// provide as input is 5. Thus, the set of field elements is {0, 1, 2} and the set of integer values
// for the input bytes is: {0, 1, 2, 3, 4}. What is the distribution of the output values produced
// by this function?
//
// ReducedElementFromBytes(0) => 0
// ReducedElementFromBytes(1) => 1
// ReducedElementFromBytes(2) => 2
// ReducedElementFromBytes(3) => 0
// ReducedElementFromBytes(4) => 1
//
// For a value space V and random value v, a uniform distribution is defined as P[V = v] = 1/|V|
// where |V| is to the order of the field. Using the results from above, we see that P[v = 0] = 2/5,
// P[v = 1] = 2/5, and P[v = 2] = 1/5. For a uniform distribution we would expect these to each be
// equal to 1/3. As they do not, this does not return uniform output for that example.
//
// To see why this is okay if the range is a multiple of the field order, change the input range to
// 6 and notice that now each output has a probability of 2/6 = 1/3, and the output is uniform.
func (f Field) ReducedElementFromBytes(bytes []byte) *Element {
	value := new(big.Int).SetBytes(bytes)
	// Reduce into [0, modulus).
	value.Mod(value, f.Int)
	return newElement(&f, value)
}
// Field returns the field this element belongs to.
func (x Element) Field() *Field {
	return x.Modulus
}
// Add returns the sum x+y reduced by the field modulus; panics when the
// operands belong to different fields.
func (x Element) Add(y *Element) *Element {
	x.validateFields(y)
	z := new(big.Int).Add(x.Value, y.Value)
	return newElement(x.Modulus, z.Mod(z, x.Modulus.Int))
}
// Sub returns the difference x-y reduced by the field modulus; panics
// when the operands belong to different fields.
func (x Element) Sub(y *Element) *Element {
	x.validateFields(y)
	z := new(big.Int).Sub(x.Value, y.Value)
	return newElement(x.Modulus, z.Mod(z, x.Modulus.Int))
}
// Neg returns the field negation of x.
func (x Element) Neg() *Element {
	z := new(big.Int).Neg(x.Value)
	return newElement(x.Modulus, z.Mod(z, x.Modulus.Int))
}
// Mul returns the product x·y reduced by the field modulus; panics when
// the operands belong to different fields.
func (x Element) Mul(y *Element) *Element {
	x.validateFields(y)
	z := new(big.Int).Mul(x.Value, y.Value)
	return newElement(x.Modulus, z.Mod(z, x.Modulus.Int))
}
// Div returns the quotient x/y, i.e. x·y⁻¹ modulo the field modulus.
// It panics with an explicit message when y has no modular inverse (for a
// prime modulus, only y ≡ 0); previously a non-invertible y surfaced as
// an opaque nil-pointer panic inside big.Int.Mul, since ModInverse
// returns nil in that case.
func (x Element) Div(y *Element) *Element {
	x.validateFields(y)
	yInv := new(big.Int).ModInverse(y.Value, x.Modulus.Int)
	if yInv == nil {
		panic(fmt.Sprintf("no modular inverse for value: %x", y.Value))
	}
	quotient := new(big.Int).Mul(x.Value, yInv)
	quotient.Mod(quotient, x.Modulus.Int)
	return newElement(x.Modulus, quotient)
}
// Pow computes x^y reduced by the modulus; panics when the operands
// belong to different fields.
func (x Element) Pow(y *Element) *Element {
	x.validateFields(y)
	return newElement(x.Modulus, new(big.Int).Exp(x.Value, y.Value, x.Modulus.Int))
}
// Invert returns x⁻¹ modulo the field modulus.
// NOTE(review): ModInverse returns nil for a non-invertible x (x ≡ 0),
// which then panics inside newElement/IsValid — consider an explicit
// error path.
func (x Element) Invert() *Element {
	return newElement(x.Modulus, new(big.Int).ModInverse(x.Value, x.Modulus.Int))
}
// Sqrt returns a modular square root of x.
// NOTE(review): ModSqrt returns nil when x is a non-residue, which then
// panics inside newElement/IsValid — consider an explicit error path.
func (x Element) Sqrt() *Element {
	return newElement(x.Modulus, new(big.Int).ModSqrt(x.Value, x.Modulus.Int))
}
// BigInt returns value as a big.Int. The underlying big.Int is returned
// directly, not a copy — callers must not mutate it.
func (x Element) BigInt() *big.Int {
	return x.Value
}
// Bytes returns the value as big-endian bytes.
func (x Element) Bytes() []byte {
	return x.BigInt().Bytes()
}
// IsEqual returns x == y: the elements must lie in the same field and
// carry equal values.
func (x Element) IsEqual(y *Element) bool {
	return x.isEqualFields(y) && x.Value.Cmp(y.Value) == 0
}
// Clone returns a new copy of the element (a fresh big.Int is built from
// the byte encoding).
func (x Element) Clone() *Element {
	return x.Modulus.ElementFromBytes(x.Bytes())
}
// isEqualFields reports whether x and y share the same modulus.
func (x Element) isEqualFields(y *Element) bool {
	return x.Modulus.Int.Cmp(y.Modulus.Int) == 0
}
// validateFields panics when x and y belong to different fields; used as
// a guard by every binary operation.
func (x Element) validateFields(y *Element) {
	if !x.isEqualFields(y) {
		panic("fields must match for valid binary operation")
	}
}
// Ed25519Order returns the order of the Ed25519 base Point (parsed once
// from its hex constant and cached).
func Ed25519Order() *big.Int {
	ed25519SubGroupOrderOnce.Do(func() {
		order, ok := new(big.Int).SetString(
			"1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED",
			16,
		)
		if !ok {
			panic("invalid hex string provided. This should never happen as it is constant.")
		}
		ed25519SubGroupOrder = order
	})
	return ed25519SubGroupOrder
}

View File

@ -0,0 +1,301 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"encoding/json"
"errors"
"fmt"
"math/big"
"testing"
"github.com/stretchr/testify/require"
)
var (
one = big.NewInt(1)
modulus, modulusOk = new(big.Int).SetString(
"1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED",
16,
)
oneBelowModulus = zero().Sub(modulus, one)
oneAboveModulus = zero().Add(modulus, one)
field25519 = NewField(modulus)
)
// buggedReader is an io.Reader test double whose Read always fails,
// used to exercise RandomElement's error path.
type buggedReader struct{}
func (r buggedReader) Read(p []byte) (n int, err error) {
	return 0, errors.New("EOF")
}
// zero returns a fresh big.Int initialized to 0.
func zero() *big.Int {
	return big.NewInt(0)
}
// assertElementZero fails the test unless e encodes the value 0.
func assertElementZero(t *testing.T, e *Element) {
	require.Equal(t, zero().Bytes(), e.Bytes())
}
// binaryOperation abstracts an Element binary op for panic testing.
type binaryOperation func(*Element) *Element
// assertUnequalFieldsPanic verifies that b panics with the expected
// message when applied to an element from a different field.
func assertUnequalFieldsPanic(t *testing.T, b binaryOperation) {
	altField := NewField(big.NewInt(23))
	altElement := altField.NewElement(one)
	require.PanicsWithValue(
		t,
		"fields must match for valid binary operation",
		func() { b(altElement) },
	)
}
func TestFieldModulus(t *testing.T) {
require.True(t, modulusOk)
}
func TestNewField(t *testing.T) {
require.PanicsWithValue(
t,
fmt.Sprintf("modulus: %x is not a prime", oneBelowModulus),
func() { NewField(oneBelowModulus) },
)
require.NotPanics(
t,
func() { NewField(modulus) },
)
}
// TestNewElement checks that newElement rejects values outside [0, modulus)
// and accepts in-range values.
func TestNewElement(t *testing.T) {
	require.PanicsWithValue(
		t,
		fmt.Sprintf("value: %x is not within field: %x", modulus, field25519.Int),
		func() { newElement(field25519, modulus) },
	)
	require.NotPanics(
		t,
		func() { newElement(field25519, oneBelowModulus) },
	)
}
// TestElementIsValid checks membership: negatives and values >= modulus are
// invalid; modulus-1 is valid.
func TestElementIsValid(t *testing.T) {
	require.False(t, field25519.IsValid(zero().Neg(one)))
	require.False(t, field25519.IsValid(modulus))
	require.False(t, field25519.IsValid(oneAboveModulus))
	require.True(t, field25519.IsValid(oneBelowModulus))
}
// TestFieldNewElement checks that Field.NewElement stores the given value
// and back-references its field.
func TestFieldNewElement(t *testing.T) {
	element := field25519.NewElement(oneBelowModulus)
	require.Equal(t, oneBelowModulus, element.Value)
	require.Equal(t, field25519, element.Field())
}
// TestZeroElement checks the additive identity carries value 0 and the field.
func TestZeroElement(t *testing.T) {
	require.Equal(t, zero(), field25519.Zero().Value)
	require.Equal(t, field25519, field25519.Zero().Field())
}
// TestOneElement checks the multiplicative identity carries value 1 and the
// field.
func TestOneElement(t *testing.T) {
	require.Equal(t, field25519.One().Value, one)
	require.Equal(t, field25519.One().Field(), field25519)
}
// TestRandomElement checks that sampling succeeds with a nil reader
// (presumably falling back to a default entropy source — confirm in
// RandomElement), that two draws differ, and that a failing reader
// surfaces an error with no element.
func TestRandomElement(t *testing.T) {
	randomElement1, err := field25519.RandomElement(nil)
	require.NoError(t, err)
	randomElement2, err := field25519.RandomElement(nil)
	require.NoError(t, err)
	randomElement3, err := field25519.RandomElement(new(buggedReader))
	require.Error(t, err)
	require.Equal(t, field25519, randomElement1.Field())
	require.Equal(t, field25519, randomElement2.Field())
	require.NotEqual(t, randomElement1.Value, randomElement2.Value)
	require.Nil(t, randomElement3)
}
// TestElementFromBytes checks round-tripping an in-range value through its
// byte encoding.
func TestElementFromBytes(t *testing.T) {
	element := field25519.ElementFromBytes(oneBelowModulus.Bytes())
	require.Equal(t, field25519, element.Field())
	require.Equal(t, oneBelowModulus, element.Value)
}
// TestReducedElementFromBytes checks that in-range bytes pass through
// unchanged while out-of-range bytes (modulus+1) are reduced mod the field
// (to 1).
func TestReducedElementFromBytes(t *testing.T) {
	element := field25519.ReducedElementFromBytes(oneBelowModulus.Bytes())
	require.Equal(t, field25519, element.Field())
	require.Equal(t, oneBelowModulus, element.Value)
	element = field25519.ReducedElementFromBytes(oneAboveModulus.Bytes())
	require.Equal(t, field25519, element.Field())
	require.Equal(t, one, element.Value)
}
// TestAddElement checks commutativity, wraparound at the modulus
// (element4 holds the modulus itself, congruent to 0), cancellation to
// zero, and the cross-field panic.
func TestAddElement(t *testing.T) {
	element1 := field25519.NewElement(one)
	element2 := field25519.NewElement(big.NewInt(2))
	element3 := field25519.NewElement(oneBelowModulus)
	element4 := &Element{field25519, modulus}
	require.Equal(t, element2, element1.Add(element1))
	require.Equal(t, big.NewInt(3), element1.Add(element2).Value)
	require.Equal(t, big.NewInt(3), element2.Add(element1).Value)
	require.Equal(t, one, element1.Add(element4).Value)
	require.Equal(t, one, element3.Add(element2).Value)
	assertElementZero(t, element1.Add(element3))
	assertUnequalFieldsPanic(t, element1.Add)
}
// TestSubElement checks self-cancellation, wraparound for negative results
// (1-2 = modulus-1), subtraction involving a value congruent to 0
// (element4 holds the modulus), and the cross-field panic.
func TestSubElement(t *testing.T) {
	element1 := field25519.NewElement(one)
	element2 := field25519.NewElement(big.NewInt(2))
	element3 := field25519.NewElement(oneBelowModulus)
	element4 := &Element{field25519, modulus}
	assertElementZero(t, element1.Sub(element1))
	require.Equal(t, element3, element1.Sub(element2))
	require.Equal(t, element1, element2.Sub(element1))
	require.Equal(t, element1, element1.Sub(element4))
	require.Equal(t, element3, element4.Sub(element1))
	require.Equal(t, element1, element4.Sub(element3))
	require.Equal(t, element3, element3.Sub(element4))
	assertUnequalFieldsPanic(t, element1.Sub)
}
// TestMulElement checks absorption by zero, the multiplicative identity,
// commutativity, wraparound (2*(modulus-1) = modulus-2), and the
// cross-field panic.
func TestMulElement(t *testing.T) {
	element1 := field25519.NewElement(one)
	element2 := field25519.NewElement(big.NewInt(2))
	element3 := field25519.NewElement(oneBelowModulus)
	element4 := field25519.NewElement(zero())
	// 2*(modulus-1) mod modulus == modulus-2.
	expectedProduct, ok := new(big.Int).SetString(
		"1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3eb",
		16,
	)
	require.True(t, ok)
	assertElementZero(t, element1.Mul(element4))
	assertElementZero(t, element4.Mul(element1))
	require.Equal(t, element3, element1.Mul(element3))
	require.Equal(t, element3, element3.Mul(element1))
	require.Equal(t, expectedProduct, element3.Mul(element2).Value)
	require.Equal(t, expectedProduct, element2.Mul(element3).Value)
	assertUnequalFieldsPanic(t, element1.Mul)
}
// TestDivElement checks division of zero, division by one, two fixed
// quotients, the division-by-zero panic, and the cross-field panic.
func TestDivElement(t *testing.T) {
	element1 := field25519.NewElement(one)
	element2 := field25519.NewElement(big.NewInt(2))
	element3 := field25519.NewElement(oneBelowModulus)
	element4 := field25519.NewElement(zero())
	// (modulus-1)/2 mod modulus.
	expectedQuotient1, ok := new(big.Int).SetString(
		"80000000000000000000000000000000a6f7cef517bce6b2c09318d2e7ae9f6",
		16,
	)
	require.True(t, ok)
	// 2/(modulus-1) mod modulus == modulus-2.
	expectedQuotient2, ok := new(big.Int).SetString(
		"1000000000000000000000000000000014def9dea2f79cd65812631a5cf5d3eb",
		16,
	)
	require.True(t, ok)
	assertElementZero(t, element4.Div(element3))
	require.Equal(t, element3, element3.Div(element1))
	require.Equal(t, expectedQuotient1, element3.Div(element2).Value)
	require.Equal(t, expectedQuotient2, element2.Div(element3).Value)
	require.Panics(t, func() { element3.Div(element4) })
	assertUnequalFieldsPanic(t, element1.Div)
}
// TestIsEqualElement checks that equality requires both matching value and
// matching field; an equal-looking element of another field is unequal.
func TestIsEqualElement(t *testing.T) {
	element1 := field25519.NewElement(oneBelowModulus)
	element2 := field25519.NewElement(big.NewInt(23))
	element3 := field25519.NewElement(oneBelowModulus)
	altField := NewField(big.NewInt(23))
	altElement1 := altField.NewElement(one)
	require.False(t, element1.IsEqual(element2))
	require.True(t, element1.IsEqual(element3))
	require.True(t, element1.IsEqual(element1))
	require.False(t, element1.IsEqual(altElement1))
}
// TestBigIntElement checks that BigInt exposes the element's value.
func TestBigIntElement(t *testing.T) {
	element := field25519.NewElement(oneBelowModulus)
	require.Equal(t, oneBelowModulus, element.BigInt())
}
// TestBytesElement pins the big-endian byte encoding of modulus-1.
func TestBytesElement(t *testing.T) {
	element := field25519.NewElement(oneBelowModulus)
	require.Equal(
		t,
		[]byte{
			0x10, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
			0x0, 0x0, 0x0, 0x0, 0x0, 0x14, 0xde, 0xf9, 0xde, 0xa2,
			0xf7, 0x9c, 0xd6, 0x58, 0x12, 0x63, 0x1a, 0x5c, 0xf5,
			0xd3, 0xec,
		},
		element.Bytes(),
	)
}
// TestCloneElement checks Clone performs a deep copy: mutating the clone's
// value must not affect the original.
func TestCloneElement(t *testing.T) {
	element := field25519.NewElement(oneBelowModulus)
	clone := element.Clone()
	require.Equal(t, clone, element)
	clone.Value.Add(one, one)
	require.NotEqual(t, clone, element)
}
// TestElementMarshalJsonRoundTrip verifies that Elements of various sizes
// (small ints, multi-limb values, modulus-1) survive a JSON
// marshal/unmarshal round trip with both value and modulus intact.
func TestElementMarshalJsonRoundTrip(t *testing.T) {
	reallyBigInt1, ok := new(big.Int).SetString("12365234878725472538962348629568356835892346729834725643857832", 10)
	require.True(t, ok)
	reallyBigInt2, ok := new(big.Int).SetString("123652348787DEF9DEA2F79CD65812631A5CF5D3ED46729834725643857832", 16)
	require.True(t, ok)
	ins := []*Element{
		newElement(field25519, big.NewInt(300)),
		newElement(field25519, big.NewInt(300000)),
		newElement(field25519, big.NewInt(12812798)),
		newElement(field25519, big.NewInt(17)),
		newElement(field25519, big.NewInt(5066680)),
		newElement(field25519, big.NewInt(3005)),
		newElement(field25519, big.NewInt(317)),
		newElement(field25519, big.NewInt(323)),
		newElement(field25519, reallyBigInt1),
		newElement(field25519, reallyBigInt2),
		newElement(field25519, oneBelowModulus),
	}
	// Run all the tests!
	for _, in := range ins {
		bytes, err := json.Marshal(in)
		require.NoError(t, err)
		require.NotNil(t, bytes)
		// Unmarshal and test
		out := &Element{}
		err = json.Unmarshal(bytes, &out)
		require.NoError(t, err)
		require.NotNil(t, out)
		require.NotNil(t, out.Modulus)
		require.NotNil(t, out.Value)
		require.Equal(t, in.Modulus.Bytes(), out.Modulus.Bytes())
		require.Equal(t, in.Value.Bytes(), out.Value.Bytes())
	}
}

View File

@ -0,0 +1,446 @@
package curves
import (
crand "crypto/rand"
"crypto/sha256"
"io"
"math/big"
"testing"
"github.com/btcsuite/btcd/btcec"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
mod "source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core"
)
// BenchmarkK256 compares the legacy btcec/big.Int-backed implementation
// (BenchScalar/BenchPoint) with the constant-time k256 implementation
// (ScalarK256/PointK256) across common group and scalar operations.
//
// Each sub-benchmark performs a fixed batch of 1000 operations per b.N
// iteration, so both backends are timed over identical workloads. The
// original version ran the batch exactly once regardless of b.N, which
// made the framework's iteration scaling — and therefore the reported
// ns/op — meaningless; setup is now excluded from timing via b.ResetTimer.
func BenchmarkK256(b *testing.B) {
	b.Run("1000 point add - btcec", func(b *testing.B) {
		points := make([]*BenchPoint, 1000)
		for i := range points {
			points[i] = points[i].Random(crand.Reader).(*BenchPoint)
		}
		acc := new(BenchPoint).Identity()
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for _, pt := range points {
				acc = acc.Add(pt)
			}
		}
	})
	b.Run("1000 point add - ct k256", func(b *testing.B) {
		curve := K256()
		points := make([]*PointK256, 1000)
		for i := range points {
			points[i] = curve.NewIdentityPoint().Random(crand.Reader).(*PointK256)
		}
		acc := curve.NewIdentityPoint()
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for _, pt := range points {
				acc = acc.Add(pt)
			}
		}
	})
	b.Run("1000 point double - btcec", func(b *testing.B) {
		acc := new(BenchPoint).Generator()
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for i := 0; i < 1000; i++ {
				acc = acc.Double()
			}
		}
	})
	b.Run("1000 point double - ct k256", func(b *testing.B) {
		acc := new(PointK256).Generator()
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for i := 0; i < 1000; i++ {
				acc = acc.Double()
			}
		}
	})
	b.Run("1000 point multiply - btcec", func(b *testing.B) {
		scalars := make([]*BenchScalar, 1000)
		for i := range scalars {
			scalars[i] = new(BenchScalar).Random(crand.Reader).(*BenchScalar)
		}
		acc := new(BenchPoint).Generator().Mul(new(BenchScalar).New(2))
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for _, sc := range scalars {
				acc = acc.Mul(sc)
			}
		}
	})
	b.Run("1000 point multiply - ct k256", func(b *testing.B) {
		scalars := make([]*ScalarK256, 1000)
		for i := range scalars {
			scalars[i] = new(ScalarK256).Random(crand.Reader).(*ScalarK256)
		}
		acc := new(PointK256).Generator()
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for _, sc := range scalars {
				acc = acc.Mul(sc)
			}
		}
	})
	b.Run("1000 scalar invert - btcec", func(b *testing.B) {
		scalars := make([]*BenchScalar, 1000)
		for i := range scalars {
			scalars[i] = new(BenchScalar).Random(crand.Reader).(*BenchScalar)
		}
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for _, sc := range scalars {
				_, _ = sc.Invert()
			}
		}
	})
	b.Run("1000 scalar invert - ct k256", func(b *testing.B) {
		scalars := make([]*ScalarK256, 1000)
		for i := range scalars {
			scalars[i] = new(ScalarK256).Random(crand.Reader).(*ScalarK256)
		}
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for _, sc := range scalars {
				_, _ = sc.Invert()
			}
		}
	})
	b.Run("1000 scalar sqrt - btcec", func(b *testing.B) {
		scalars := make([]*BenchScalar, 1000)
		for i := range scalars {
			scalars[i] = new(BenchScalar).Random(crand.Reader).(*BenchScalar)
		}
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for _, sc := range scalars {
				_, _ = sc.Sqrt()
			}
		}
	})
	b.Run("1000 scalar sqrt - ct k256", func(b *testing.B) {
		scalars := make([]*ScalarK256, 1000)
		for i := range scalars {
			scalars[i] = new(ScalarK256).Random(crand.Reader).(*ScalarK256)
		}
		b.ResetTimer()
		for n := 0; n < b.N; n++ {
			for _, sc := range scalars {
				_, _ = sc.Sqrt()
			}
		}
	})
}
type BenchScalar struct {
value *big.Int
}
func (s *BenchScalar) Random(reader io.Reader) Scalar {
var v [32]byte
_, _ = reader.Read(v[:])
value := new(big.Int).SetBytes(v[:])
return &BenchScalar{
value: value.Mod(value, btcec.S256().N),
}
}
func (s *BenchScalar) Hash(bytes []byte) Scalar {
h := sha256.Sum256(bytes)
value := new(big.Int).SetBytes(h[:])
return &BenchScalar{
value: value.Mod(value, btcec.S256().N),
}
}
func (s *BenchScalar) Zero() Scalar {
return &BenchScalar{
value: big.NewInt(0),
}
}
func (s *BenchScalar) One() Scalar {
return &BenchScalar{
value: big.NewInt(1),
}
}
func (s *BenchScalar) IsZero() bool {
return s.value.Cmp(big.NewInt(0)) == 0
}
func (s *BenchScalar) IsOne() bool {
return s.value.Cmp(big.NewInt(1)) == 0
}
func (s *BenchScalar) IsOdd() bool {
return s.value.Bit(0) == 1
}
func (s *BenchScalar) IsEven() bool {
return s.value.Bit(0) == 0
}
func (s *BenchScalar) New(value int) Scalar {
v := big.NewInt(int64(value))
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
func (s *BenchScalar) Cmp(rhs Scalar) int {
r := rhs.(*BenchScalar)
return s.value.Cmp(r.value)
}
func (s *BenchScalar) Square() Scalar {
v := new(big.Int).Mul(s.value, s.value)
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
func (s *BenchScalar) Double() Scalar {
v := new(big.Int).Add(s.value, s.value)
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
// Invert returns the modular inverse of s modulo the btcec group order.
// NOTE(review): big.Int.ModInverse returns nil when no inverse exists
// (s == 0 mod N); that nil is stored without an error here, so a caller
// would panic on first use of the result — confirm this is acceptable
// for a benchmark-only helper.
func (s *BenchScalar) Invert() (Scalar, error) {
	return &BenchScalar{
		value: new(big.Int).ModInverse(s.value, btcec.S256().N),
	}, nil
}
// Sqrt returns a modular square root of s modulo the btcec group order.
// NOTE(review): big.Int.ModSqrt returns nil when s is not a quadratic
// residue; that nil is stored without an error here — confirm this is
// acceptable for a benchmark-only helper.
func (s *BenchScalar) Sqrt() (Scalar, error) {
	return &BenchScalar{
		value: new(big.Int).ModSqrt(s.value, btcec.S256().N),
	}, nil
}
func (s *BenchScalar) Cube() Scalar {
v := new(big.Int).Mul(s.value, s.value)
v.Mul(v, s.value)
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
func (s *BenchScalar) Add(rhs Scalar) Scalar {
r := rhs.(*BenchScalar)
v := new(big.Int).Add(s.value, r.value)
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
func (s *BenchScalar) Sub(rhs Scalar) Scalar {
r := rhs.(*BenchScalar)
v := new(big.Int).Sub(s.value, r.value)
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
func (s *BenchScalar) Mul(rhs Scalar) Scalar {
r := rhs.(*BenchScalar)
v := new(big.Int).Mul(s.value, r.value)
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
func (s *BenchScalar) MulAdd(y, z Scalar) Scalar {
yy := y.(*BenchScalar)
zz := z.(*BenchScalar)
v := new(big.Int).Mul(s.value, yy.value)
v.Add(v, zz.value)
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
func (s *BenchScalar) Div(rhs Scalar) Scalar {
r := rhs.(*BenchScalar)
v := new(big.Int).ModInverse(r.value, btcec.S256().N)
v.Mul(v, s.value)
return &BenchScalar{
value: v.Mod(v, btcec.S256().N),
}
}
func (s *BenchScalar) Neg() Scalar {
v, _ := mod.Neg(s.value, btcec.S256().N)
return &BenchScalar{
value: v,
}
}
func (s *BenchScalar) SetBigInt(v *big.Int) (Scalar, error) {
return &BenchScalar{
value: new(big.Int).Set(v),
}, nil
}
func (s *BenchScalar) BigInt() *big.Int {
return new(big.Int).Set(s.value)
}
func (s *BenchScalar) Point() Point {
return (&BenchPoint{}).Identity()
}
func (s *BenchScalar) Bytes() []byte {
return internal.ReverseScalarBytes(s.value.Bytes())
}
func (s *BenchScalar) SetBytes(bytes []byte) (Scalar, error) {
value := new(big.Int).SetBytes(internal.ReverseScalarBytes(bytes))
value.Mod(value, btcec.S256().N)
return &BenchScalar{
value,
}, nil
}
func (s *BenchScalar) SetBytesWide(bytes []byte) (Scalar, error) {
value := new(big.Int).SetBytes(internal.ReverseScalarBytes(bytes))
value.Mod(value, btcec.S256().N)
return &BenchScalar{
value,
}, nil
}
func (s *BenchScalar) Clone() Scalar {
return &BenchScalar{
value: new(big.Int).Set(s.value),
}
}
type BenchPoint struct {
x, y *big.Int
}
func (p *BenchPoint) Random(reader io.Reader) Point {
var k [32]byte
curve := btcec.S256()
_, _ = reader.Read(k[:])
x, y := curve.ScalarBaseMult(k[:])
for !curve.IsOnCurve(x, y) {
_, _ = reader.Read(k[:])
x, y = curve.ScalarBaseMult(k[:])
}
return &BenchPoint{x, y}
}
func (p *BenchPoint) Hash(bytes []byte) Point {
return nil
}
func (p *BenchPoint) Identity() Point {
return &BenchPoint{x: big.NewInt(0), y: big.NewInt(0)}
}
func (p *BenchPoint) Generator() Point {
return &BenchPoint{
x: new(big.Int).Set(btcec.S256().Gx),
y: new(big.Int).Set(btcec.S256().Gy),
}
}
func (p *BenchPoint) IsIdentity() bool {
return false
}
func (p *BenchPoint) IsNegative() bool {
return false
}
func (p *BenchPoint) IsOnCurve() bool {
return btcec.S256().IsOnCurve(p.x, p.y)
}
func (p *BenchPoint) Double() Point {
x, y := btcec.S256().Double(p.x, p.y)
return &BenchPoint{
x, y,
}
}
func (p *BenchPoint) Scalar() Scalar {
return &BenchScalar{value: big.NewInt(0)}
}
func (p *BenchPoint) Neg() Point {
y, _ := mod.Neg(p.y, btcec.S256().P)
return &BenchPoint{
x: new(big.Int).Set(p.x), y: y,
}
}
func (p *BenchPoint) Add(rhs Point) Point {
r := rhs.(*BenchPoint)
x, y := btcec.S256().Add(p.x, p.y, r.x, r.y)
return &BenchPoint{
x, y,
}
}
func (p *BenchPoint) Sub(rhs Point) Point {
t := rhs.Neg().(*BenchPoint)
return t.Add(p)
}
func (p *BenchPoint) Mul(rhs Scalar) Point {
k := rhs.Bytes()
x, y := btcec.S256().ScalarMult(p.x, p.y, k)
return &BenchPoint{
x, y,
}
}
func (p *BenchPoint) Equal(rhs Point) bool {
r := rhs.(*BenchPoint)
return p.x.Cmp(r.x) == 0 && p.y.Cmp(r.y) == 0
}
func (p *BenchPoint) Set(x, y *big.Int) (Point, error) {
return &BenchPoint{
x, y,
}, nil
}
func (p *BenchPoint) ToAffineCompressed() []byte {
return nil
}
func (p *BenchPoint) ToAffineUncompressed() []byte {
return nil
}
func (p *BenchPoint) FromAffineCompressed(bytes []byte) (Point, error) {
return nil, nil
}
func (p *BenchPoint) FromAffineUncompressed(bytes []byte) (Point, error) {
return nil, nil
}
func (p *BenchPoint) CurveName() string {
return btcec.S256().Name
}
func (p *BenchPoint) SumOfProducts(points []Point, scalars []Scalar) Point {
biScalars := make([]*big.Int, len(scalars))
for i := 0; i < len(scalars); i++ {
biScalars[i] = scalars[i].BigInt()
}
return sumOfProductsPippenger(points, biScalars)
}
//func rhsK256(x *big.Int) *big.Int {
// // y^2 = x^3 + B
// x3, _ := mod.Exp(x, big.NewInt(3), btcec.S256().P)
// x3.Add(x3, btcec.S256().B)
// return x3.ModSqrt(x3, btcec.S256().P)
//}

View File

@ -0,0 +1,669 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
"crypto/elliptic"
"fmt"
"io"
"math/big"
"sync"
"github.com/btcsuite/btcd/btcec"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/internal"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves/native"
secp256k1 "source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves/native/k256"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves/native/k256/fp"
"source.quilibrium.com/quilibrium/monorepo/nekryptology/pkg/core/curves/native/k256/fq"
)
// oldK256Initonce guards one-time initialization of the oldK256 singleton.
var oldK256Initonce sync.Once
var oldK256 Koblitz256

// Koblitz256 adapts secp256k1 to Go's elliptic.CurveParams shape while
// delegating the group arithmetic to the constant-time k256 implementation.
type Koblitz256 struct {
	*elliptic.CurveParams
}
// oldK256InitAll copies the secp256k1 domain parameters out of btcec into
// the package-level oldK256 singleton; invoked exactly once via
// oldK256Initonce.
func oldK256InitAll() {
	curve := btcec.S256()
	oldK256.CurveParams = new(elliptic.CurveParams)
	oldK256.P = curve.P
	oldK256.N = curve.N
	oldK256.Gx = curve.Gx
	oldK256.Gy = curve.Gy
	oldK256.B = curve.B
	oldK256.BitSize = curve.BitSize
	oldK256.Name = K256Name
}
// K256Curve returns the process-wide secp256k1 curve wrapper, lazily
// initializing its parameters from btcec on first use.
func K256Curve() *Koblitz256 {
	oldK256Initonce.Do(oldK256InitAll)
	return &oldK256
}
// Params returns the embedded elliptic.CurveParams, satisfying the
// elliptic.Curve interface.
func (curve *Koblitz256) Params() *elliptic.CurveParams {
	return curve.CurveParams
}
// IsOnCurve reports whether (x, y) is a valid secp256k1 point by attempting
// to construct it in the native representation.
func (curve *Koblitz256) IsOnCurve(x, y *big.Int) bool {
	_, err := secp256k1.K256PointNew().SetBigInt(x, y)
	return err == nil
}
// Add returns (x1,y1) + (x2,y2) on secp256k1. Invalid input points yield
// (nil, nil) rather than an error, per the elliptic.Curve signature.
func (curve *Koblitz256) Add(x1, y1, x2, y2 *big.Int) (*big.Int, *big.Int) {
	p1, err := secp256k1.K256PointNew().SetBigInt(x1, y1)
	if err != nil {
		return nil, nil
	}
	p2, err := secp256k1.K256PointNew().SetBigInt(x2, y2)
	if err != nil {
		return nil, nil
	}
	return p1.Add(p1, p2).BigInt()
}
// Double returns 2*(x1,y1) on secp256k1; an invalid input point yields
// (nil, nil).
func (curve *Koblitz256) Double(x1, y1 *big.Int) (*big.Int, *big.Int) {
	p1, err := secp256k1.K256PointNew().SetBigInt(x1, y1)
	if err != nil {
		return nil, nil
	}
	return p1.Double(p1).BigInt()
}
// ScalarMult returns k*(Bx,By). The scalar bytes k are given big-endian
// (per elliptic.Curve) and are reversed before being handed to the native
// field decoder; invalid points or scalars yield (nil, nil).
func (curve *Koblitz256) ScalarMult(Bx, By *big.Int, k []byte) (*big.Int, *big.Int) {
	p1, err := secp256k1.K256PointNew().SetBigInt(Bx, By)
	if err != nil {
		return nil, nil
	}
	var bytes [32]byte
	copy(bytes[:], internal.ReverseScalarBytes(k))
	s, err := fq.K256FqNew().SetBytes(&bytes)
	if err != nil {
		return nil, nil
	}
	return p1.Mul(p1, s).BigInt()
}
// ScalarBaseMult returns k*G for the secp256k1 generator G; an invalid
// scalar encoding yields (nil, nil).
func (curve *Koblitz256) ScalarBaseMult(k []byte) (*big.Int, *big.Int) {
	var bytes [32]byte
	copy(bytes[:], internal.ReverseScalarBytes(k))
	s, err := fq.K256FqNew().SetBytes(&bytes)
	if err != nil {
		return nil, nil
	}
	p1 := secp256k1.K256PointNew().Generator()
	return p1.Mul(p1, s).BigInt()
}
// ScalarK256 is an element of the secp256k1 scalar field (integers modulo
// the group order), held in the constant-time native field representation.
type ScalarK256 struct {
	value *native.Field
}

// PointK256 is a secp256k1 group element backed by the constant-time
// native elliptic point implementation.
type PointK256 struct {
	value *native.EllipticPoint
}
// Random returns a scalar derived by hashing 64 bytes drawn from reader;
// a nil reader yields nil. Read errors are deliberately ignored —
// NOTE(review): confirm callers always supply a reliable entropy source.
func (s *ScalarK256) Random(reader io.Reader) Scalar {
	if reader == nil {
		return nil
	}
	var seed [64]byte
	_, _ = reader.Read(seed[:])
	return s.Hash(seed[:])
}
// Hash maps arbitrary bytes to a scalar using expand_message_xmd with
// SHA-256 under the "secp256k1_XMD:SHA-256_SSWU_RO_" domain separation
// tag; the 48-byte expansion is widened to 64 bytes and reduced mod the
// group order.
func (s *ScalarK256) Hash(bytes []byte) Scalar {
	dst := []byte("secp256k1_XMD:SHA-256_SSWU_RO_")
	xmd := native.ExpandMsgXmd(native.EllipticPointHasherSha256(), bytes, dst, 48)
	var t [64]byte
	copy(t[:48], internal.ReverseScalarBytes(xmd))
	return &ScalarK256{
		value: fq.K256FqNew().SetBytesWide(&t),
	}
}
func (s *ScalarK256) Zero() Scalar {
return &ScalarK256{
value: fq.K256FqNew().SetZero(),
}
}
func (s *ScalarK256) One() Scalar {
return &ScalarK256{
value: fq.K256FqNew().SetOne(),
}
}
func (s *ScalarK256) IsZero() bool {
return s.value.IsZero() == 1
}
func (s *ScalarK256) IsOne() bool {
return s.value.IsOne() == 1
}
func (s *ScalarK256) IsOdd() bool {
return s.value.Bytes()[0]&1 == 1
}
func (s *ScalarK256) IsEven() bool {
return s.value.Bytes()[0]&1 == 0
}
// New returns the scalar for a small signed integer; negative inputs are
// first wrapped into [0, order) via the field's big.Int modulus.
func (s *ScalarK256) New(value int) Scalar {
	t := fq.K256FqNew()
	v := big.NewInt(int64(value))
	if value < 0 {
		v.Mod(v, t.Params.BiModulus)
	}
	return &ScalarK256{
		value: t.SetBigInt(v),
	}
}
func (s *ScalarK256) Cmp(rhs Scalar) int {
r, ok := rhs.(*ScalarK256)
if ok {
return s.value.Cmp(r.value)
} else {
return -2
}
}
func (s *ScalarK256) Square() Scalar {
return &ScalarK256{
value: fq.K256FqNew().Square(s.value),
}
}
func (s *ScalarK256) Double() Scalar {
return &ScalarK256{
value: fq.K256FqNew().Double(s.value),
}
}
func (s *ScalarK256) Invert() (Scalar, error) {
value, wasInverted := fq.K256FqNew().Invert(s.value)
if !wasInverted {
return nil, fmt.Errorf("inverse doesn't exist")
}
return &ScalarK256{
value,
}, nil
}
func (s *ScalarK256) Sqrt() (Scalar, error) {
value, wasSquare := fq.K256FqNew().Sqrt(s.value)
if !wasSquare {
return nil, fmt.Errorf("not a square")
}
return &ScalarK256{
value,
}, nil
}
func (s *ScalarK256) Cube() Scalar {
value := fq.K256FqNew().Mul(s.value, s.value)
value.Mul(value, s.value)
return &ScalarK256{
value,
}
}
func (s *ScalarK256) Add(rhs Scalar) Scalar {
r, ok := rhs.(*ScalarK256)
if ok {
return &ScalarK256{
value: fq.K256FqNew().Add(s.value, r.value),
}
} else {
return nil
}
}
func (s *ScalarK256) Sub(rhs Scalar) Scalar {
r, ok := rhs.(*ScalarK256)
if ok {
return &ScalarK256{
value: fq.K256FqNew().Sub(s.value, r.value),
}
} else {
return nil
}
}
func (s *ScalarK256) Mul(rhs Scalar) Scalar {
r, ok := rhs.(*ScalarK256)
if ok {
return &ScalarK256{
value: fq.K256FqNew().Mul(s.value, r.value),
}
} else {
return nil
}
}
func (s *ScalarK256) MulAdd(y, z Scalar) Scalar {
return s.Mul(y).Add(z)
}
func (s *ScalarK256) Div(rhs Scalar) Scalar {
r, ok := rhs.(*ScalarK256)
if ok {
v, wasInverted := fq.K256FqNew().Invert(r.value)
if !wasInverted {
return nil
}
v.Mul(v, s.value)
return &ScalarK256{value: v}
} else {
return nil
}
}
func (s *ScalarK256) Neg() Scalar {
return &ScalarK256{
value: fq.K256FqNew().Neg(s.value),
}
}
func (s *ScalarK256) SetBigInt(v *big.Int) (Scalar, error) {
if v == nil {
return nil, fmt.Errorf("'v' cannot be nil")
}
value := fq.K256FqNew().SetBigInt(v)
return &ScalarK256{
value,
}, nil
}
func (s *ScalarK256) BigInt() *big.Int {
return s.value.BigInt()
}
func (s *ScalarK256) Bytes() []byte {
t := s.value.Bytes()
return internal.ReverseScalarBytes(t[:])
}
func (s *ScalarK256) SetBytes(bytes []byte) (Scalar, error) {
if len(bytes) != 32 {
return nil, fmt.Errorf("invalid length")
}
var seq [32]byte
copy(seq[:], internal.ReverseScalarBytes(bytes))
value, err := fq.K256FqNew().SetBytes(&seq)
if err != nil {
return nil, err
}
return &ScalarK256{
value,
}, nil
}
func (s *ScalarK256) SetBytesWide(bytes []byte) (Scalar, error) {
if len(bytes) != 64 {
return nil, fmt.Errorf("invalid length")
}
var seq [64]byte
copy(seq[:], bytes)
return &ScalarK256{
value: fq.K256FqNew().SetBytesWide(&seq),
}, nil
}
func (s *ScalarK256) Point() Point {
return new(PointK256).Identity()
}
func (s *ScalarK256) Clone() Scalar {
return &ScalarK256{
value: fq.K256FqNew().Set(s.value),
}
}
func (s *ScalarK256) MarshalBinary() ([]byte, error) {
return scalarMarshalBinary(s)
}
func (s *ScalarK256) UnmarshalBinary(input []byte) error {
sc, err := scalarUnmarshalBinary(input)
if err != nil {
return err
}
ss, ok := sc.(*ScalarK256)
if !ok {
return fmt.Errorf("invalid scalar")
}
s.value = ss.value
return nil
}
func (s *ScalarK256) MarshalText() ([]byte, error) {
return scalarMarshalText(s)
}
func (s *ScalarK256) UnmarshalText(input []byte) error {
sc, err := scalarUnmarshalText(input)
if err != nil {
return err
}
ss, ok := sc.(*ScalarK256)
if !ok {
return fmt.Errorf("invalid scalar")
}
s.value = ss.value
return nil
}
func (s *ScalarK256) MarshalJSON() ([]byte, error) {
return scalarMarshalJson(s)
}
func (s *ScalarK256) UnmarshalJSON(input []byte) error {
sc, err := scalarUnmarshalJson(input)
if err != nil {
return err
}
S, ok := sc.(*ScalarK256)
if !ok {
return fmt.Errorf("invalid type")
}
s.value = S.value
return nil
}
// Random returns a pseudorandom point derived by hashing 64 bytes drawn
// from reader. A nil reader yields nil, matching ScalarK256.Random (the
// original dereferenced a nil reader and panicked). Read errors are
// deliberately ignored, as in ScalarK256.Random.
func (p *PointK256) Random(reader io.Reader) Point {
	if reader == nil {
		return nil
	}
	var seed [64]byte
	_, _ = reader.Read(seed[:])
	return p.Hash(seed[:])
}
func (p *PointK256) Hash(bytes []byte) Point {
value, err := secp256k1.K256PointNew().Hash(bytes, native.EllipticPointHasherSha256())
// TODO: change hash to return an error also
if err != nil {
return nil
}
return &PointK256{value}
}
func (p *PointK256) Identity() Point {
return &PointK256{
value: secp256k1.K256PointNew().Identity(),
}
}
func (p *PointK256) Generator() Point {
return &PointK256{
value: secp256k1.K256PointNew().Generator(),
}
}
func (p *PointK256) IsIdentity() bool {
return p.value.IsIdentity()
}
func (p *PointK256) IsNegative() bool {
return p.value.GetY().Value[0]&1 == 1
}
func (p *PointK256) IsOnCurve() bool {
return p.value.IsOnCurve()
}
func (p *PointK256) Double() Point {
value := secp256k1.K256PointNew().Double(p.value)
return &PointK256{value}
}
func (p *PointK256) Scalar() Scalar {
return new(ScalarK256).Zero()
}
func (p *PointK256) Neg() Point {
value := secp256k1.K256PointNew().Neg(p.value)
return &PointK256{value}
}
func (p *PointK256) Add(rhs Point) Point {
if rhs == nil {
return nil
}
r, ok := rhs.(*PointK256)
if ok {
value := secp256k1.K256PointNew().Add(p.value, r.value)
return &PointK256{value}
} else {
return nil
}
}
func (p *PointK256) Sub(rhs Point) Point {
if rhs == nil {
return nil
}
r, ok := rhs.(*PointK256)
if ok {
value := secp256k1.K256PointNew().Sub(p.value, r.value)
return &PointK256{value}
} else {
return nil
}
}
func (p *PointK256) Mul(rhs Scalar) Point {
if rhs == nil {
return nil
}
r, ok := rhs.(*ScalarK256)
if ok {
value := secp256k1.K256PointNew().Mul(p.value, r.value)
return &PointK256{value}
} else {
return nil
}
}
func (p *PointK256) Equal(rhs Point) bool {
r, ok := rhs.(*PointK256)
if ok {
return p.value.Equal(r.value) == 1
} else {
return false
}
}
func (p *PointK256) Set(x, y *big.Int) (Point, error) {
value, err := secp256k1.K256PointNew().SetBigInt(x, y)
if err != nil {
return nil, err
}
return &PointK256{value}, nil
}
// ToAffineCompressed serializes the point in the 33-byte SEC compressed
// form: a 0x02/0x03 prefix encoding the parity of Y followed by the
// big-endian X coordinate.
func (p *PointK256) ToAffineCompressed() []byte {
	var x [33]byte
	x[0] = byte(2)
	t := secp256k1.K256PointNew().ToAffine(p.value)
	// Fold Y's least-significant bit into the prefix (2 = even, 3 = odd).
	x[0] |= t.Y.Bytes()[0] & 1
	xBytes := t.X.Bytes()
	// Reverse the native field byte order into big-endian wire order.
	copy(x[1:], internal.ReverseScalarBytes(xBytes[:]))
	return x[:]
}
// ToAffineUncompressed serializes the point in the 65-byte SEC
// uncompressed form: a 0x04 prefix followed by big-endian X and Y.
func (p *PointK256) ToAffineUncompressed() []byte {
	var out [65]byte
	out[0] = byte(4)
	t := secp256k1.K256PointNew().ToAffine(p.value)
	arr := t.X.Bytes()
	// Reverse each coordinate from native order into big-endian wire order.
	copy(out[1:33], internal.ReverseScalarBytes(arr[:]))
	arr = t.Y.Bytes()
	copy(out[33:], internal.ReverseScalarBytes(arr[:]))
	return out[:]
}
// FromAffineCompressed deserializes a 33-byte SEC compressed point
// (0x02/0x03 sign byte followed by the big-endian X coordinate). When x
// does not produce a quadratic residue on the right-hand side, the
// identity point is returned with a nil error (preserving the original
// behavior of treating such input as the point at infinity).
func (p *PointK256) FromAffineCompressed(bytes []byte) (Point, error) {
	var raw [native.FieldBytes]byte
	if len(bytes) != 33 {
		return nil, fmt.Errorf("invalid byte sequence")
	}
	sign := int(bytes[0])
	if sign != 2 && sign != 3 {
		return nil, fmt.Errorf("invalid sign byte")
	}
	sign &= 0x1
	copy(raw[:], internal.ReverseScalarBytes(bytes[1:]))
	x, err := fp.K256FpNew().SetBytes(&raw)
	if err != nil {
		return nil, err
	}
	value := secp256k1.K256PointNew().Identity()
	rhs := fp.K256FpNew()
	// Use the freshly constructed point's curve arithmetic rather than
	// p.value's: the original read p.value.Arithmetic, which panics when
	// the receiver is a zero value (e.g. new(PointK256).FromAffineCompressed).
	value.Arithmetic.RhsEq(rhs, x)
	// Test that rhs is a quadratic residue; if not, this Point stays at
	// infinity.
	y, wasQr := fp.K256FpNew().Sqrt(rhs)
	if wasQr {
		// Fix the sign so Y's parity matches the prefix bit.
		sigY := int(y.Bytes()[0] & 1)
		if sigY != sign {
			y.Neg(y)
		}
		value.X = x
		value.Y = y
		value.Z.SetOne()
	}
	return &PointK256{value}, nil
}
func (p *PointK256) FromAffineUncompressed(bytes []byte) (Point, error) {
var arr [native.FieldBytes]byte
if len(bytes) != 65 {
return nil, fmt.Errorf("invalid byte sequence")
}
if bytes[0] != 4 {
return nil, fmt.Errorf("invalid sign byte")
}
copy(arr[:], internal.ReverseScalarBytes(bytes[1:33]))
x, err := fp.K256FpNew().SetBytes(&arr)
if err != nil {
return nil, err
}
copy(arr[:], internal.ReverseScalarBytes(bytes[33:]))
y, err := fp.K256FpNew().SetBytes(&arr)
if err != nil {
return nil, err
}
value := secp256k1.K256PointNew()
value.X = x
value.Y = y
value.Z.SetOne()
return &PointK256{value}, nil
}
func (p *PointK256) CurveName() string {
return p.value.Params.Name
}
func (p *PointK256) SumOfProducts(points []Point, scalars []Scalar) Point {
nPoints := make([]*native.EllipticPoint, len(points))
nScalars := make([]*native.Field, len(scalars))
for i, pt := range points {
ptv, ok := pt.(*PointK256)
if !ok {
return nil
}
nPoints[i] = ptv.value
}
for i, sc := range scalars {
s, ok := sc.(*ScalarK256)
if !ok {
return nil
}
nScalars[i] = s.value
}
value := secp256k1.K256PointNew()
_, err := value.SumOfProducts(nPoints, nScalars)
if err != nil {
return nil
}
return &PointK256{value}
}
func (p *PointK256) X() *native.Field {
return p.value.GetX()
}
func (p *PointK256) Y() *native.Field {
return p.value.GetY()
}
func (p *PointK256) Params() *elliptic.CurveParams {
return K256Curve().Params()
}
func (p *PointK256) MarshalBinary() ([]byte, error) {
return pointMarshalBinary(p)
}
func (p *PointK256) UnmarshalBinary(input []byte) error {
pt, err := pointUnmarshalBinary(input)
if err != nil {
return err
}
ppt, ok := pt.(*PointK256)
if !ok {
return fmt.Errorf("invalid point")
}
p.value = ppt.value
return nil
}
func (p *PointK256) MarshalText() ([]byte, error) {
return pointMarshalText(p)
}
func (p *PointK256) UnmarshalText(input []byte) error {
pt, err := pointUnmarshalText(input)
if err != nil {
return err
}
ppt, ok := pt.(*PointK256)
if !ok {
return fmt.Errorf("invalid point")
}
p.value = ppt.value
return nil
}
func (p *PointK256) MarshalJSON() ([]byte, error) {
return pointMarshalJson(p)
}
func (p *PointK256) UnmarshalJSON(input []byte) error {
pt, err := pointUnmarshalJson(input)
if err != nil {
return err
}
P, ok := pt.(*PointK256)
if !ok {
return fmt.Errorf("invalid type")
}
p.value = P.value
return nil
}

View File

@ -0,0 +1,421 @@
//
// Copyright Coinbase, Inc. All Rights Reserved.
//
// SPDX-License-Identifier: Apache-2.0
//
package curves
import (
crand "crypto/rand"
"math/big"
"sync"
"testing"
"github.com/btcsuite/btcd/btcec"
"github.com/stretchr/testify/require"
)
// mockReader is a deterministic io.Reader for tests: it endlessly cycles
// through a fixed seed so "random" values are reproducible across runs.
type mockReader struct {
	index int
	seed  []byte
}

var mockRngInitonce sync.Once
var mockRng mockReader

// newMockReader initializes the shared mockRng with a 32-byte seed of
// all 1 bytes.
func newMockReader() {
	mockRng.index = 0
	mockRng.seed = make([]byte, 32)
	for i := range mockRng.seed {
		mockRng.seed[i] = 1
	}
}

// testRng returns the process-wide deterministic reader, initializing it
// exactly once.
func testRng() *mockReader {
	mockRngInitonce.Do(newMockReader)
	return &mockRng
}

// Read fills p by cycling through the seed, advancing the shared cursor;
// it always fills the whole buffer and never fails.
func (m *mockReader) Read(p []byte) (n int, err error) {
	limit := len(m.seed)
	for i := range p {
		p[i] = m.seed[m.index]
		m.index = (m.index + 1) % limit
	}
	return len(p), nil
}
func TestScalarK256Random(t *testing.T) {
curve := K256()
sc := curve.Scalar.Random(testRng())
s, ok := sc.(*ScalarK256)
require.True(t, ok)
expected, _ := new(big.Int).SetString("2f71aaec5e14d747c72e46cdcaffffe6f542f38b3f0925469ceb24ac1c65885d", 16)
require.Equal(t, s.value.BigInt(), expected)
// Try 10 random values
for i := 0; i < 10; i++ {
sc := curve.Scalar.Random(crand.Reader)
_, ok := sc.(*ScalarK256)
require.True(t, ok)
require.True(t, !sc.IsZero())
}
}
func TestScalarK256Hash(t *testing.T) {
var b [32]byte
k256 := K256()
sc := k256.Scalar.Hash(b[:])
s, ok := sc.(*ScalarK256)
require.True(t, ok)
expected, _ := new(big.Int).SetString("e5cb3500b809a8202de0834a805068bc21bde09bd6367815e7523a37adf8f52e", 16)
require.Equal(t, s.value.BigInt(), expected)
}
func TestScalarK256Zero(t *testing.T) {
k256 := K256()
sc := k256.Scalar.Zero()
require.True(t, sc.IsZero())
require.True(t, sc.IsEven())
}
func TestScalarK256One(t *testing.T) {
k256 := K256()
sc := k256.Scalar.One()
require.True(t, sc.IsOne())
require.True(t, sc.IsOdd())
}
func TestScalarK256New(t *testing.T) {
k256 := K256()
three := k256.Scalar.New(3)
require.True(t, three.IsOdd())
four := k256.Scalar.New(4)
require.True(t, four.IsEven())
neg1 := k256.Scalar.New(-1)
require.True(t, neg1.IsEven())
neg2 := k256.Scalar.New(-2)
require.True(t, neg2.IsOdd())
}
func TestScalarK256Square(t *testing.T) {
k256 := K256()
three := k256.Scalar.New(3)
nine := k256.Scalar.New(9)
require.Equal(t, three.Square().Cmp(nine), 0)
}
func TestScalarK256Cube(t *testing.T) {
k256 := K256()
three := k256.Scalar.New(3)
twentySeven := k256.Scalar.New(27)
require.Equal(t, three.Cube().Cmp(twentySeven), 0)
}
// TestScalarK256Double checks 2·3 = 6.
func TestScalarK256Double(t *testing.T) {
	curve := K256()
	require.Equal(t, curve.Scalar.New(3).Double().Cmp(curve.Scalar.New(6)), 0)
}
// TestScalarK256Neg checks negation agrees with constructing the negative
// value directly, for both 1 and a larger magnitude.
func TestScalarK256Neg(t *testing.T) {
	curve := K256()
	require.Equal(t, curve.Scalar.One().Neg().Cmp(curve.Scalar.New(-1)), 0)
	require.Equal(t, curve.Scalar.New(333333).Neg().Cmp(curve.Scalar.New(-333333)), 0)
}
// TestScalarK256Invert checks that 9⁻¹ mod n matches a precomputed constant.
// Unlike the surrounding tests, Invert can fail (non-invertible input), so
// its error — and the type assertion — are checked explicitly rather than
// discarded, avoiding a confusing nil-pointer panic on failure.
func TestScalarK256Invert(t *testing.T) {
	k256 := K256()
	nine := k256.Scalar.New(9)
	actual, err := nine.Invert()
	require.NoError(t, err)
	sa, ok := actual.(*ScalarK256)
	require.True(t, ok)
	bn, ok := new(big.Int).SetString("8e38e38e38e38e38e38e38e38e38e38d842841d57dd303af6a9150f8e5737996", 16)
	require.True(t, ok)
	expected, err := k256.Scalar.SetBigInt(bn)
	require.NoError(t, err)
	require.Equal(t, sa.Cmp(expected), 0)
}
// TestScalarK256Sqrt checks √9 = 3 (one of the two square roots mod n).
func TestScalarK256Sqrt(t *testing.T) {
	curve := K256()
	root, err := curve.Scalar.New(9).Sqrt()
	require.NoError(t, err)
	scalar, _ := root.(*ScalarK256)
	require.Equal(t, scalar.Cmp(curve.Scalar.New(3)), 0)
}
// TestScalarK256Add covers the plain case 9+6=15 and wraparound past the
// group order n: (n-3) + 9 ≡ 6 (mod n).
func TestScalarK256Add(t *testing.T) {
	curve := K256()
	nine := curve.Scalar.New(9)
	six := curve.Scalar.New(6)
	sum := nine.Add(six)
	require.NotNil(t, sum)
	require.Equal(t, curve.Scalar.New(15).Cmp(sum), 0)
	nMinus3 := new(big.Int).Sub(new(big.Int).Set(btcec.S256().N), big.NewInt(3))
	nearOrder, err := curve.Scalar.SetBigInt(nMinus3)
	require.NoError(t, err)
	wrapped := nearOrder.Add(nine)
	require.NotNil(t, wrapped)
	require.Equal(t, wrapped.Cmp(six), 0)
}
// TestScalarK256Sub covers subtraction with and without wraparound:
// 6 - 9 ≡ n - 3 (mod n), while 9 - 6 = 3 stays in range.
func TestScalarK256Sub(t *testing.T) {
	curve := K256()
	nine := curve.Scalar.New(9)
	six := curve.Scalar.New(6)
	nMinus3 := new(big.Int).Sub(new(big.Int).Set(btcec.S256().N), big.NewInt(3))
	wrapped, err := curve.Scalar.SetBigInt(nMinus3)
	require.NoError(t, err)
	require.Equal(t, wrapped.Cmp(six.Sub(nine)), 0)
	require.Equal(t, nine.Sub(six).Cmp(curve.Scalar.New(3)), 0)
}
// TestScalarK256Mul checks 9·6 = 54 and the modular identity
// (n-1)² ≡ 1 (mod n), since n-1 ≡ -1.
func TestScalarK256Mul(t *testing.T) {
	curve := K256()
	nine := curve.Scalar.New(9)
	six := curve.Scalar.New(6)
	require.Equal(t, nine.Mul(six).Cmp(curve.Scalar.New(54)), 0)
	nMinus1 := new(big.Int).Sub(new(big.Int).Set(btcec.S256().N), big.NewInt(1))
	minusOne, err := curve.Scalar.SetBigInt(nMinus1)
	require.NoError(t, err)
	require.Equal(t, minusOne.Mul(minusOne).Cmp(curve.Scalar.New(1)), 0)
}
// TestScalarK256Div checks 9/9 = 1 and 54/9 = 6 (division = multiply by
// modular inverse).
func TestScalarK256Div(t *testing.T) {
	curve := K256()
	nine := curve.Scalar.New(9)
	require.Equal(t, nine.Div(nine).Cmp(curve.Scalar.New(1)), 0)
	require.Equal(t, curve.Scalar.New(54).Div(nine).Cmp(curve.Scalar.New(6)), 0)
}
// TestScalarK256Serialize round-trips scalars through their 32-byte
// big-endian encoding: a fixed value with a pinned encoding, then a
// handful of random scalars.
func TestScalarK256Serialize(t *testing.T) {
	curve := K256()
	sc := curve.Scalar.New(255)
	encoded := sc.Bytes()
	require.Equal(t, len(encoded), 32)
	require.Equal(t, encoded, []byte{0x00, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff})
	decoded, err := curve.Scalar.SetBytes(encoded)
	require.NoError(t, err)
	require.Equal(t, decoded.Cmp(sc), 0)
	// Round-trip random values as well.
	for i := 0; i < 10; i++ {
		sc = curve.Scalar.Random(crand.Reader)
		encoded = sc.Bytes()
		require.Equal(t, len(encoded), 32)
		decoded, err = curve.Scalar.SetBytes(encoded)
		require.NoError(t, err)
		require.Equal(t, decoded.Cmp(sc), 0)
	}
}
// TestScalarK256Nil confirms nil operands are rejected with nil results (or
// an error) rather than panicking.
func TestScalarK256Nil(t *testing.T) {
	curve := K256()
	one := curve.Scalar.New(1)
	require.Nil(t, one.Add(nil))
	require.Nil(t, one.Sub(nil))
	require.Nil(t, one.Mul(nil))
	require.Nil(t, one.Div(nil))
	require.Nil(t, curve.Scalar.Random(nil))
	// Cmp signals "incomparable" with the -2 sentinel.
	require.Equal(t, one.Cmp(nil), -2)
	_, err := curve.Scalar.SetBigInt(nil)
	require.Error(t, err)
}
// TestPointK256Random pins the point produced from the deterministic test
// RNG, then sanity-checks that points from a real RNG are typed and never
// the identity.
func TestPointK256Random(t *testing.T) {
	curve := K256()
	fixed := curve.Point.Random(testRng())
	typed, ok := fixed.(*PointK256)
	require.True(t, ok)
	wantX, _ := new(big.Int).SetString("c6e18a1d7cf834462675b31581639a18e14fd0f73f8dfd5fe2993f88f6fbe008", 16)
	wantY, _ := new(big.Int).SetString("b65fab3243c5d07cef005d7fb335ebe8019efd954e95e68c86ef9b3bd7bccd36", 16)
	require.Equal(t, typed.X().BigInt(), wantX)
	require.Equal(t, typed.Y().BigInt(), wantY)
	for i := 0; i < 10; i++ {
		random := curve.Point.Random(crand.Reader)
		_, ok := random.(*PointK256)
		require.True(t, ok)
		require.True(t, !random.IsIdentity())
	}
}
// TestPointK256Hash is a regression pin: hashing 32 zero bytes to a point
// must yield known affine coordinates.
func TestPointK256Hash(t *testing.T) {
	var input [32]byte
	curve := K256()
	hashed := curve.Point.Hash(input[:])
	point, ok := hashed.(*PointK256)
	require.True(t, ok)
	wantX, _ := new(big.Int).SetString("95d0ad42f68ddb5a808469dd75fa866890dcc7d039844e0e2d58a6d25bd9a66b", 16)
	wantY, _ := new(big.Int).SetString("f37c564d05168dab4413caacdb8e3426143fc5fb24a470ccd8a51856c11d163c", 16)
	require.Equal(t, point.X().BigInt(), wantX)
	require.Equal(t, point.Y().BigInt(), wantY)
}
// TestPointK256Identity checks the identity point and its compressed
// encoding: a 0x02 tag followed by 32 zero bytes.
func TestPointK256Identity(t *testing.T) {
	identity := K256().Point.Identity()
	require.True(t, identity.IsIdentity())
	require.Equal(t, identity.ToAffineCompressed(), []byte{2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0})
}
// TestPointK256Generator checks the generator matches btcec's base point.
func TestPointK256Generator(t *testing.T) {
	gen, ok := K256().Point.Generator().(*PointK256)
	require.True(t, ok)
	require.Equal(t, gen.X().BigInt().Cmp(btcec.S256().Gx), 0)
	require.Equal(t, gen.Y().BigInt().Cmp(btcec.S256().Gy), 0)
}
// TestPointK256Set checks Set accepts (0, 0) as the identity encoding and
// accepts the generator's on-curve coordinates.
func TestPointK256Set(t *testing.T) {
	curve := K256()
	identity, err := curve.Point.Set(big.NewInt(0), big.NewInt(0))
	require.NoError(t, err)
	require.True(t, identity.IsIdentity())
	_, err = curve.Point.Set(btcec.S256().Gx, btcec.S256().Gy)
	require.NoError(t, err)
}
// TestPointK256Double checks doubling agrees with scalar-mul by 2 and with
// G+G, and that the identity is a fixed point of doubling.
func TestPointK256Double(t *testing.T) {
	curve := K256()
	gen := curve.Point.Generator()
	doubled := gen.Double()
	require.True(t, doubled.Equal(gen.Mul(curve.Scalar.New(2))))
	identity := curve.Point.Identity()
	require.True(t, identity.Double().Equal(identity))
	require.True(t, doubled.Equal(curve.Point.Generator().Add(curve.Point.Generator())))
}
// TestPointK256Neg checks negation is an involution and fixes the identity.
func TestPointK256Neg(t *testing.T) {
	curve := K256()
	negated := curve.Point.Generator().Neg()
	require.True(t, negated.Neg().Equal(curve.Point.Generator()))
	require.True(t, curve.Point.Identity().Neg().Equal(curve.Point.Identity()))
}
// TestPointK256Add checks that G+G, 2·G (Double), and [2]G (Mul) all agree,
// and that [3]G equals G+G+G.
func TestPointK256Add(t *testing.T) {
	curve := K256()
	gen := curve.Point.Generator().(*PointK256)
	viaAdd := gen.Add(gen).(*PointK256)
	viaDouble := gen.Double().(*PointK256)
	viaMul := gen.Mul(curve.Scalar.New(2)).(*PointK256)
	require.True(t, viaAdd.Equal(viaDouble))
	require.True(t, viaAdd.Equal(viaMul))
	require.True(t, gen.Add(gen).Equal(gen.Double()))
	require.True(t, gen.Mul(curve.Scalar.New(3)).Equal(gen.Add(gen).Add(gen)))
}
// TestPointK256Sub checks [4]G - 3G = G and [4]G - 4G = identity.
func TestPointK256Sub(t *testing.T) {
	curve := K256()
	gen := curve.Point.Generator()
	fourG := curve.Point.Generator().Mul(curve.Scalar.New(4))
	require.True(t, fourG.Sub(gen).Sub(gen).Sub(gen).Equal(gen))
	require.True(t, fourG.Sub(gen).Sub(gen).Sub(gen).Sub(gen).IsIdentity())
}
// TestPointK256Mul checks [4]G equals two successive doublings of G.
func TestPointK256Mul(t *testing.T) {
	curve := K256()
	gen := curve.Point.Generator()
	fourG := curve.Point.Generator().Mul(curve.Scalar.New(4))
	require.True(t, gen.Double().Double().Equal(fourG))
}
// TestPointK256Serialize pins the 33-byte compressed and 65-byte
// uncompressed encodings of a fixed (deterministic-RNG-derived) point, then
// round-trips random points through both encodings.
func TestPointK256Serialize(t *testing.T) {
	curve := K256()
	fixedScalar := curve.Scalar.Random(testRng())
	gen := curve.Point.Generator()
	fixed := gen.Mul(fixedScalar).(*PointK256)
	require.Equal(t, fixed.ToAffineCompressed(), []byte{0x2, 0x1b, 0xa7, 0x7e, 0x98, 0xd6, 0xd8, 0x49, 0x45, 0xa4, 0x75, 0xd8, 0x6, 0xc0, 0x94, 0x5b, 0x8c, 0xf0, 0x5b, 0x8a, 0xb2, 0x76, 0xbb, 0x9f, 0x6e, 0x52, 0x9a, 0x11, 0x9c, 0x79, 0xdd, 0xf6, 0x5a})
	require.Equal(t, fixed.ToAffineUncompressed(), []byte{0x4, 0x1b, 0xa7, 0x7e, 0x98, 0xd6, 0xd8, 0x49, 0x45, 0xa4, 0x75, 0xd8, 0x6, 0xc0, 0x94, 0x5b, 0x8c, 0xf0, 0x5b, 0x8a, 0xb2, 0x76, 0xbb, 0x9f, 0x6e, 0x52, 0x9a, 0x11, 0x9c, 0x79, 0xdd, 0xf6, 0x5a, 0xb2, 0x96, 0x7c, 0x59, 0x4, 0xeb, 0x9a, 0xaa, 0xa9, 0x1d, 0x4d, 0xd0, 0x2d, 0xc6, 0x37, 0xee, 0x4a, 0x95, 0x51, 0x60, 0xab, 0xab, 0xf7, 0xdb, 0x30, 0x7d, 0x7d, 0x0, 0x68, 0x6c, 0xcf, 0xf6})
	fromCompressed, err := fixed.FromAffineCompressed(fixed.ToAffineCompressed())
	require.NoError(t, err)
	require.True(t, fixed.Equal(fromCompressed))
	fromUncompressed, err := fixed.FromAffineUncompressed(fixed.ToAffineUncompressed())
	require.NoError(t, err)
	require.True(t, fixed.Equal(fromUncompressed))
	// Smoke test: random points must survive both encodings.
	for i := 0; i < 25; i++ {
		pt := gen.Mul(curve.Scalar.Random(crand.Reader))
		compressed := pt.ToAffineCompressed()
		require.Equal(t, len(compressed), 33)
		rc, err := pt.FromAffineCompressed(compressed)
		require.NoError(t, err)
		require.True(t, pt.Equal(rc))
		uncompressed := pt.ToAffineUncompressed()
		require.Equal(t, len(uncompressed), 65)
		ru, err := pt.FromAffineUncompressed(uncompressed)
		require.NoError(t, err)
		require.True(t, pt.Equal(ru))
	}
}
// TestPointK256Nil confirms nil operands to point operations are rejected
// with nil results rather than panicking. (The trailing scalar checks
// duplicate TestScalarK256Nil; kept to preserve existing coverage.)
func TestPointK256Nil(t *testing.T) {
	curve := K256()
	gen := curve.Point.Generator()
	require.Nil(t, gen.Add(nil))
	require.Nil(t, gen.Sub(nil))
	require.Nil(t, gen.Mul(nil))
	require.Nil(t, curve.Scalar.Random(nil))
	require.False(t, gen.Equal(nil))
	_, err := curve.Scalar.SetBigInt(nil)
	require.Error(t, err)
}
// TestPointK256SumOfProducts checks the multi-scalar multiplication against
// naive multiply-then-add accumulation: first a fixed case where all points
// are the generator (8+9+10+11+12 = 50, so the answer is [50]G), then 25
// rounds of random points and scalars.
func TestPointK256SumOfProducts(t *testing.T) {
	expected := new(PointK256).Generator().Mul(new(ScalarK256).New(50))
	points := make([]Point, 5)
	for i := range points {
		points[i] = new(PointK256).Generator()
	}
	scalars := []Scalar{
		new(ScalarK256).New(8),
		new(ScalarK256).New(9),
		new(ScalarK256).New(10),
		new(ScalarK256).New(11),
		new(ScalarK256).New(12),
	}
	actual := expected.SumOfProducts(points, scalars)
	require.NotNil(t, actual)
	require.True(t, expected.Equal(actual))
	for round := 0; round < 25; round++ {
		// Rebuild the expected value by naive accumulation.
		expected = expected.Identity()
		for i := range points {
			points[i] = new(PointK256).Random(crand.Reader)
			scalars[i] = new(ScalarK256).Random(crand.Reader)
			expected = expected.Add(points[i].Mul(scalars[i]))
		}
		actual = expected.SumOfProducts(points, scalars)
		require.NotNil(t, actual)
		require.True(t, expected.Equal(actual))
	}
}

Some files were not shown because too many files have changed in this diff Show More