Skip to content

dlog v1 finalization: replace TokenDataWitness with Metadata #1033

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Apr 15, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions token/core/zkatdlog/nogh/v1/issue/issue_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ func prepareZKIssue(t *testing.T) (*issue2.Prover, *issue2.Verifier) {
return prover, verifier
}

func prepareInputsForZKIssue(pp *v1.PublicParams) ([]*token.TokenDataWitness, []*math.G1) {
func prepareInputsForZKIssue(pp *v1.PublicParams) ([]*token.Metadata, []*math.G1) {
values := make([]uint64, 2)
values[0] = 120
values[1] = 190
Expand All @@ -50,5 +50,5 @@ func prepareInputsForZKIssue(pp *v1.PublicParams) ([]*token.TokenDataWitness, []
for i := 0; i < len(values); i++ {
tokens[i] = NewToken(curve.NewZrFromInt(int64(values[i])), bf[i], "ABC", pp.PedersenGenerators, curve)
}
return token.NewTokenDataWitness("ABC", values, bf), tokens
return token.NewMetadata(pp.Curve, "ABC", values, bf), tokens
}
2 changes: 1 addition & 1 deletion token/core/zkatdlog/nogh/v1/issue/issuer.go
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ func (i *Issuer) GenerateZKIssue(values []uint64, owners [][]byte) (*Action, []*
for j := 0; j < len(inf); j++ {
inf[j] = &token.Metadata{
Type: i.Type,
Value: math.Curves[i.PublicParams.Curve].NewZrFromUint64(tw[j].Value),
Value: tw[j].Value,
BlindingFactor: tw[j].BlindingFactor,
Issuer: signerRaw,
}
Expand Down
7 changes: 5 additions & 2 deletions token/core/zkatdlog/nogh/v1/issue/prover.go
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ type Prover struct {
RangeCorrectness *rp.RangeCorrectnessProver
}

func NewProver(tw []*token.TokenDataWitness, tokens []*math.G1, pp *v1.PublicParams) (*Prover, error) {
func NewProver(tw []*token.Metadata, tokens []*math.G1, pp *v1.PublicParams) (*Prover, error) {
c := math.Curves[pp.Curve]
p := &Prover{}
tokenType := c.HashToZr([]byte(tw[0].Type))
Expand All @@ -64,7 +64,10 @@ func NewProver(tw []*token.TokenDataWitness, tokens []*math.G1, pp *v1.PublicPar
return nil, errors.New("invalid token witness")
}
// tw[i] = tw[i].Clone()
values[i] = tw[i].Value
values[i], err = tw[i].Value.Uint()
if err != nil {
return nil, errors.Wrapf(err, "invalid token witness values")
}
blindingFactors[i] = c.ModSub(tw[i].BlindingFactor, p.SameType.blindingFactor, c.GroupOrder)
}
coms := make([]*math.G1, len(tokens))
Expand Down
61 changes: 27 additions & 34 deletions token/core/zkatdlog/nogh/v1/token/token.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ import (
"github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/protos-go/utils"
noghv1 "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/setup"
"github.com/hyperledger-labs/fabric-token-sdk/token/services/tokens/core/comm"
token2 "github.com/hyperledger-labs/fabric-token-sdk/token/token"
"github.com/hyperledger-labs/fabric-token-sdk/token/token"
"github.com/pkg/errors"
)

Expand Down Expand Up @@ -66,7 +66,7 @@ func (t *Token) Deserialize(bytes []byte) error {
}

// ToClear returns Token in the clear
func (t *Token) ToClear(meta *Metadata, pp *noghv1.PublicParams) (*token2.Token, error) {
func (t *Token) ToClear(meta *Metadata, pp *noghv1.PublicParams) (*token.Token, error) {
com, err := commit([]*math.Zr{math.Curves[pp.Curve].HashToZr([]byte(meta.Type)), meta.Value, meta.BlindingFactor}, pp.PedersenGenerators, math.Curves[pp.Curve])
if err != nil {
return nil, errors.Wrap(err, "cannot retrieve token in the clear: failed to check token data")
Expand All @@ -75,7 +75,7 @@ func (t *Token) ToClear(meta *Metadata, pp *noghv1.PublicParams) (*token2.Token,
if !com.Equals(t.Data) {
return nil, errors.New("cannot retrieve token in the clear: output does not match provided opening")
}
return &token2.Token{
return &token.Token{
Type: meta.Type,
Quantity: "0x" + meta.Value.String(),
Owner: t.Owner,
Expand All @@ -92,12 +92,12 @@ func (t *Token) Validate(checkOwner bool) error {
return nil
}

func computeTokens(tw []*TokenDataWitness, pp []*math.G1, c *math.Curve) ([]*math.G1, error) {
func computeTokens(tw []*Metadata, pp []*math.G1, c *math.Curve) ([]*math.G1, error) {
tokens := make([]*math.G1, len(tw))
var err error
for i := 0; i < len(tw); i++ {
hash := c.HashToZr([]byte(tw[i].Type))
tokens[i], err = commit([]*math.Zr{hash, c.NewZrFromUint64(tw[i].Value), tw[i].BlindingFactor}, pp, c)
tokens[i], err = commit([]*math.Zr{hash, tw[i].Value, tw[i].BlindingFactor}, pp, c)
if err != nil {
return nil, errors.WithMessagef(err, "failed to compute token [%d]", i)
}
Expand All @@ -106,20 +106,20 @@ func computeTokens(tw []*TokenDataWitness, pp []*math.G1, c *math.Curve) ([]*mat
return tokens, nil
}

func GetTokensWithWitness(values []uint64, ttype token2.Type, pp []*math.G1, c *math.Curve) ([]*math.G1, []*TokenDataWitness, error) {
func GetTokensWithWitness(values []uint64, tokenType token.Type, pp []*math.G1, c *math.Curve) ([]*math.G1, []*Metadata, error) {
if c == nil {
return nil, nil, errors.New("cannot get tokens with witness: please initialize curve")
}
rand, err := c.Rand()
if err != nil {
return nil, nil, errors.Wrap(err, "cannot get tokens with witness")
}
tw := make([]*TokenDataWitness, len(values))
tw := make([]*Metadata, len(values))
for i, v := range values {
tw[i] = &TokenDataWitness{
tw[i] = &Metadata{
BlindingFactor: c.NewRandomZr(rand),
Value: v,
Type: ttype,
Value: c.NewZrFromUint64(v),
Type: tokenType,
}
}
tokens, err := computeTokens(tw, pp, c)
Expand All @@ -132,6 +132,16 @@ func GetTokensWithWitness(values []uint64, ttype token2.Type, pp []*math.G1, c *
// Metadata contains the metadata of a token
type Metadata comm.Metadata

// NewMetadata builds one Metadata entry per value, pairing each value (lifted
// into the given curve's Zr field) with its blinding factor. Only the entry at
// index 0 carries the token type; callers read the type from the first element,
// matching the layout of the TokenDataWitness slice this replaces.
// NOTE(review): panics if values is empty or shorter than bfs' coverage —
// preserved from the original contract.
func NewMetadata(curve math.CurveID, tokenType token.Type, values []uint64, bfs []*math.Zr) []*Metadata {
	c := math.Curves[curve]
	out := make([]*Metadata, 0, len(values))
	for i, v := range values {
		out = append(out, &Metadata{
			Value:          c.NewZrFromUint64(v),
			BlindingFactor: bfs[i],
		})
	}
	out[0].Type = tokenType
	return out
}

// Deserialize un-marshals Metadata
func (m *Metadata) Deserialize(b []byte) error {
typed, err := comm.UnmarshalTypedToken(b)
Expand All @@ -142,7 +152,7 @@ func (m *Metadata) Deserialize(b []byte) error {
if err := proto.Unmarshal(typed.Token, metadata); err != nil {
return errors.Wrapf(err, "failed unmarshalling metadata")
}
m.Type = token2.Type(metadata.Type)
m.Type = token.Type(metadata.Type)
m.Value, err = utils.FromZrProto(metadata.Value)
if err != nil {
return errors.Wrapf(err, "failed to deserialize metadata")
Expand Down Expand Up @@ -179,30 +189,13 @@ func (m *Metadata) Serialize() ([]byte, error) {
return comm.WrapMetadataWithType(raw)
}

// TokenDataWitness contains the opening of Data in Token
type TokenDataWitness struct {
Type token2.Type
Value uint64
BlindingFactor *math.Zr
}

// Clone produces a copy of TokenDataWitness
func (tdw *TokenDataWitness) Clone() *TokenDataWitness {
return &TokenDataWitness{
Type: tdw.Type,
Value: tdw.Value,
BlindingFactor: tdw.BlindingFactor.Copy(),
}
}

// NewTokenDataWitness returns an array of TokenDataWitness that corresponds to the passed arguments
func NewTokenDataWitness(ttype token2.Type, values []uint64, bfs []*math.Zr) []*TokenDataWitness {
witness := make([]*TokenDataWitness, len(values))
for i, v := range values {
witness[i] = &TokenDataWitness{Value: v, BlindingFactor: bfs[i]}
func (m *Metadata) Clone() *Metadata {
return &Metadata{
Type: m.Type,
BlindingFactor: m.BlindingFactor,
Issuer: m.Issuer,
Value: m.Value,
}
witness[0].Type = ttype
return witness
}

func commit(vector []*math.Zr, generators []*math.G1, c *math.Curve) (*math.G1, error) {
Expand Down
13 changes: 4 additions & 9 deletions token/core/zkatdlog/nogh/v1/transfer/sender.go
Original file line number Diff line number Diff line change
Expand Up @@ -58,18 +58,13 @@ func (s *Sender) GenerateZKTransfer(ctx context.Context, values []uint64, owners
}
span.AddEvent("get_token_data")
in := getTokenData(s.Inputs)
intw := make([]*token.TokenDataWitness, len(s.InputInformation))
intw := make([]*token.Metadata, len(s.InputInformation))
for i := 0; i < len(s.InputInformation); i++ {
if s.InputInformation[0].Type != s.InputInformation[i].Type {
return nil, nil, errors.New("cannot generate transfer: please choose inputs of the same token type")
}
v, err := s.InputInformation[i].Value.Uint()
if err != nil {
return nil, nil, errors.New("cannot generate transfer: invalid value")
}

intw[i] = &token.TokenDataWitness{
Value: v,
intw[i] = &token.Metadata{
Value: s.InputInformation[i].Value,
Type: s.InputInformation[i].Type,
BlindingFactor: s.InputInformation[i].BlindingFactor,
}
Expand Down Expand Up @@ -99,7 +94,7 @@ func (s *Sender) GenerateZKTransfer(ctx context.Context, values []uint64, owners
for i := 0; i < len(inf); i++ {
inf[i] = &token.Metadata{
Type: s.InputInformation[0].Type,
Value: math.Curves[s.PublicParams.Curve].NewZrFromUint64(outtw[i].Value),
Value: outtw[i].Value,
BlindingFactor: outtw[i].BlindingFactor,
}
}
Expand Down
25 changes: 20 additions & 5 deletions token/core/zkatdlog/nogh/v1/transfer/transfer.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,11 +66,11 @@ type Prover struct {
}

// NewProver returns a Action Prover that corresponds to the passed arguments
func NewProver(inputWitness, outputWitness []*token.TokenDataWitness, inputs, outputs []*math.G1, pp *v1.PublicParams) (*Prover, error) {
func NewProver(inputWitness, outputWitness []*token.Metadata, inputs, outputs []*math.G1, pp *v1.PublicParams) (*Prover, error) {
c := math.Curves[pp.Curve]
p := &Prover{}
inW := make([]*token.TokenDataWitness, len(inputWitness))
outW := make([]*token.TokenDataWitness, len(outputWitness))
inW := make([]*token.Metadata, len(inputWitness))
outW := make([]*token.Metadata, len(outputWitness))
for i := 0; i < len(inputWitness); i++ {
if inputWitness[i] == nil || inputWitness[i].BlindingFactor == nil {
return nil, errors.New("invalid token witness")
Expand All @@ -92,7 +92,10 @@ func NewProver(inputWitness, outputWitness []*token.TokenDataWitness, inputs, ou
return nil, errors.New("invalid token witness")
}
outW[i] = outputWitness[i].Clone()
values[i] = outW[i].Value
values[i], err = outW[i].Value.Uint()
if err != nil {
return nil, errors.Wrapf(err, "invalid token witness values")
}
blindingFactors[i] = c.ModSub(outW[i].BlindingFactor, typeBF, c.GroupOrder)
}
commitmentToType.Add(pp.PedersenGenerators[2].Mul(typeBF))
Expand All @@ -107,7 +110,19 @@ func NewProver(inputWitness, outputWitness []*token.TokenDataWitness, inputs, ou
coms[i] = outputs[i].Copy()
coms[i].Sub(commitmentToType)
}
p.RangeCorrectness = rp.NewRangeCorrectnessProver(coms, values, blindingFactors, pp.PedersenGenerators[1:], pp.RangeProofParams.LeftGenerators, pp.RangeProofParams.RightGenerators, pp.RangeProofParams.P, pp.RangeProofParams.Q, pp.RangeProofParams.BitLength, pp.RangeProofParams.NumberOfRounds, math.Curves[pp.Curve])
p.RangeCorrectness = rp.NewRangeCorrectnessProver(
coms,
values,
blindingFactors,
pp.PedersenGenerators[1:],
pp.RangeProofParams.LeftGenerators,
pp.RangeProofParams.RightGenerators,
pp.RangeProofParams.P,
pp.RangeProofParams.Q,
pp.RangeProofParams.BitLength,
pp.RangeProofParams.NumberOfRounds,
math.Curves[pp.Curve],
)

}
return p, nil
Expand Down
20 changes: 10 additions & 10 deletions token/core/zkatdlog/nogh/v1/transfer/transfer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ func prepareZKTransferWithInvalidRange() (*transfer.Prover, *transfer.Verifier)
return prover, verifier
}

func prepareInputsForZKTransfer(pp *v1.PublicParams) ([]*token.TokenDataWitness, []*token.TokenDataWitness, []*math.G1, []*math.G1) {
func prepareInputsForZKTransfer(pp *v1.PublicParams) ([]*token.Metadata, []*token.Metadata, []*math.G1, []*math.G1) {
c := math.Curves[pp.Curve]
rand, err := c.Rand()
Expect(err).NotTo(HaveOccurred())
Expand All @@ -145,20 +145,20 @@ func prepareInputsForZKTransfer(pp *v1.PublicParams) ([]*token.TokenDataWitness,
outValues[1] = 20

in, out := prepareInputsOutputs(inValues, outValues, inBF, outBF, ttype, pp.PedersenGenerators, c)
intw := make([]*token.TokenDataWitness, len(inValues))
intw := make([]*token.Metadata, len(inValues))
for i := 0; i < len(intw); i++ {
intw[i] = &token.TokenDataWitness{BlindingFactor: inBF[i], Value: inValues[i], Type: ttype}
intw[i] = &token.Metadata{BlindingFactor: inBF[i], Value: c.NewZrFromUint64(inValues[i]), Type: ttype}
}

outtw := make([]*token.TokenDataWitness, len(outValues))
outtw := make([]*token.Metadata, len(outValues))
for i := 0; i < len(outtw); i++ {
outtw[i] = &token.TokenDataWitness{BlindingFactor: outBF[i], Value: outValues[i], Type: ttype}
outtw[i] = &token.Metadata{BlindingFactor: outBF[i], Value: c.NewZrFromUint64(outValues[i]), Type: ttype}
}

return intw, outtw, in, out
}

func prepareInvalidInputsForZKTransfer(pp *v1.PublicParams) ([]*token.TokenDataWitness, []*token.TokenDataWitness, []*math.G1, []*math.G1) {
func prepareInvalidInputsForZKTransfer(pp *v1.PublicParams) ([]*token.Metadata, []*token.Metadata, []*math.G1, []*math.G1) {
c := math.Curves[pp.Curve]
rand, err := c.Rand()
Expect(err).NotTo(HaveOccurred())
Expand All @@ -180,14 +180,14 @@ func prepareInvalidInputsForZKTransfer(pp *v1.PublicParams) ([]*token.TokenDataW
outValues[1] = 45

in, out := prepareInputsOutputs(inValues, outValues, inBF, outBF, ttype, pp.PedersenGenerators, c)
intw := make([]*token.TokenDataWitness, len(inValues))
intw := make([]*token.Metadata, len(inValues))
for i := 0; i < len(intw); i++ {
intw[i] = &token.TokenDataWitness{BlindingFactor: inBF[i], Value: inValues[i], Type: ttype}
intw[i] = &token.Metadata{BlindingFactor: inBF[i], Value: c.NewZrFromUint64(inValues[i]), Type: ttype}
}

outtw := make([]*token.TokenDataWitness, len(outValues))
outtw := make([]*token.Metadata, len(outValues))
for i := 0; i < len(outtw); i++ {
outtw[i] = &token.TokenDataWitness{BlindingFactor: outBF[i], Value: outValues[i], Type: ttype}
outtw[i] = &token.Metadata{BlindingFactor: outBF[i], Value: c.NewZrFromUint64(outValues[i]), Type: ttype}
}

return intw, outtw, in, out
Expand Down
6 changes: 3 additions & 3 deletions token/core/zkatdlog/nogh/v1/transfer/typeandsum.go
Original file line number Diff line number Diff line change
Expand Up @@ -111,17 +111,17 @@ type TypeAndSumWitness struct {
}

// NewTypeAndSumWitness returns a TypeAndSumWitness as a function of the passed arguments
func NewTypeAndSumWitness(bf *math.Zr, in, out []*token.TokenDataWitness, c *math.Curve) *TypeAndSumWitness {
func NewTypeAndSumWitness(bf *math.Zr, in, out []*token.Metadata, c *math.Curve) *TypeAndSumWitness {
inValues := make([]*math.Zr, len(in))
outValues := make([]*math.Zr, len(out))
inBF := make([]*math.Zr, len(in))
outBF := make([]*math.Zr, len(out))
for i := 0; i < len(in); i++ {
inValues[i] = c.NewZrFromUint64(in[i].Value)
inValues[i] = in[i].Value
inBF[i] = in[i].BlindingFactor
}
for i := 0; i < len(out); i++ {
outValues[i] = c.NewZrFromUint64(out[i].Value)
outValues[i] = out[i].Value
outBF[i] = out[i].BlindingFactor
}
return &TypeAndSumWitness{inValues: inValues, outValues: outValues, Type: c.HashToZr([]byte(in[0].Type)), inBlindingFactors: inBF, outBlindingFactors: outBF, typeBlindingFactor: bf}
Expand Down
8 changes: 4 additions & 4 deletions token/core/zkatdlog/nogh/v1/transfer/typeandsum_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -180,14 +180,14 @@ func prepareIOCProver(pp []*math.G1, c *math.Curve) (*transfer.TypeAndSumWitness

in, out := prepareInputsOutputs(inValues, outValues, inBF, outBF, ttype, pp, c)

intw := make([]*token.TokenDataWitness, len(inValues))
intw := make([]*token.Metadata, len(inValues))
for i := 0; i < len(intw); i++ {
intw[i] = &token.TokenDataWitness{BlindingFactor: inBF[i], Value: inValues[i], Type: ttype}
intw[i] = &token.Metadata{BlindingFactor: inBF[i], Value: c.NewZrFromUint64(inValues[i]), Type: ttype}
}

outtw := make([]*token.TokenDataWitness, len(outValues))
outtw := make([]*token.Metadata, len(outValues))
for i := 0; i < len(outtw); i++ {
outtw[i] = &token.TokenDataWitness{BlindingFactor: outBF[i], Value: outValues[i], Type: ttype}
outtw[i] = &token.Metadata{BlindingFactor: outBF[i], Value: c.NewZrFromUint64(outValues[i]), Type: ttype}
}
typeBlindingFactor := c.NewRandomZr(rand)
commitmentToType := pp[0].Mul(c.HashToZr([]byte(ttype)))
Expand Down