/*
Copyright IBM Corp. All Rights Reserved.

SPDX-License-Identifier: Apache-2.0
*/

package regression_test

import (
	"embed"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"path"
	"testing"

	"github.com/hyperledger-labs/fabric-token-sdk/token"
	"github.com/hyperledger-labs/fabric-token-sdk/token/core"
	fabtoken "github.com/hyperledger-labs/fabric-token-sdk/token/core/fabtoken/v1/driver"
	dlog "github.com/hyperledger-labs/fabric-token-sdk/token/core/zkatdlog/nogh/v1/driver"
	"github.com/hyperledger-labs/fabric-token-sdk/token/services/network/fabric/tcc"
	tk "github.com/hyperledger-labs/fabric-token-sdk/token/token"
	"github.com/stretchr/testify/require"
)

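// testDataFS embeds the recorded regression vectors (public parameters and
// serialized token requests) under testdata/.
//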
//go:embed testdata
var testDataFS embed.FS

// TestRegression replays previously recorded regression test vectors from the
// `testdata/<variant>` directories. Each directory contains a base64-encoded
// `params.txt` with the public parameters and a set of action sub-directories
// (`transfers_i1_o1`, `issues_i2_o2`, ...), each holding JSON files named
// `output.<n>.json` that carry a serialized token request and its associated
// txid. The test unmarshals every recorded request and verifies it with the
// validator to ensure the library remains backwards compatible.
//
// Notes:
//   - The testdata used here is generated by `testdata/generator`. To re-generate
//     vectors for a new setup, run that generator and commit the produced
//     artifacts to the corresponding `testdata/...` directory.
//   - Each action directory is expected to contain 64 vectors
//     (output.0.json..output.63.json). Update the loop range in `testRegression`
//     if you add or remove vectors.
func TestRegression(t *testing.T) {
	t.Parallel()
	for _, action := range []string{"transfers", "issues", "redeems", "swaps"} {
		for _, rootDir := range []string{
			"testdata/32-BLS12_381_BBS_GURVY",
			"testdata/64-BLS12_381_BBS_GURVY",
			"testdata/32-BN254",
			"testdata/64-BN254",
		} {
			for _, shape := range []string{"i1_o1", "i1_o2", "i2_o1", "i2_o2"} {
				testRegressionParallel(t, rootDir, fmt.Sprintf("%s_%s", action, shape))
			}
		}
	}
}

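// testRegressionParallel runs testRegression as a parallel subtest named after
// the vector directory.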
func testRegressionParallel(t *testing.T, rootDir, subFolder string) {
	t.Helper()
	t.Run(fmt.Sprintf("%s-%s", rootDir, subFolder), func(t *testing.T) {
		t.Parallel()
		testRegression(t, rootDir, subFolder)
	})
}

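// testRegression loads the public parameters recorded for rootDir, rebuilds the
// validator from them, and replays every recorded token request in subFolder.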
func testRegression(t *testing.T, rootDir, subFolder string) {
	t.Helper()
	t.Logf("regression test for [%s:%s]", rootDir, subFolder)

	// Load the base64-encoded public parameters recorded for this variant.
	// embed.FS paths are always '/'-separated, hence path.Join rather than filepath.Join.
	paramsData, err := testDataFS.ReadFile(path.Join(rootDir, "params.txt"))
	require.NoError(t, err)

	ppRaw, err := base64.StdEncoding.DecodeString(string(paramsData))
	require.NoError(t, err)

	_, tokenValidator, err := tokenServicesFactory(ppRaw)
	require.NoError(t, err)

	var tokenData struct {
		ReqRaw []byte `json:"req_raw"`
		TXID   string `json:"txid"`
	}
	// Replay each recorded token request and check that it still verifies.
	for i := range 64 {
		jsonData, err := testDataFS.ReadFile(
			path.Join(
				rootDir,
				subFolder,
				fmt.Sprintf("output.%d.json", i),
			),
		)
		require.NoError(t, err)
		err = json.Unmarshal(jsonData, &tokenData)
		require.NoError(t, err)
		_, _, err = tokenValidator.UnmarshallAndVerifyWithMetadata(
			t.Context(),
			&fakeLedger{},
			token.RequestAnchor(tokenData.TXID),
			tokenData.ReqRaw,
		)
		require.NoError(t, err)
	}
}

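// tokenServicesFactory deserializes the public parameters and returns them
// together with the default validator for the corresponding driver; both the
// fabtoken and zkatdlog drivers are wired in.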
func tokenServicesFactory(bytes []byte) (tcc.PublicParameters, tcc.Validator, error) {
	is := core.NewPPManagerFactoryService(fabtoken.NewPPMFactory(), dlog.NewPPMFactory())

	ppm, err := is.PublicParametersFromBytes(bytes)
	if err != nil {
		return nil, nil, err
	}
	v, err := is.DefaultValidator(ppm)
	if err != nil {
		return nil, nil, err
	}
	return ppm, token.NewValidator(v), nil
}

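// fakeLedger is a ledger stub handed to the validator. The recorded requests are
// expected to verify without reading ledger state, so any GetState call panics to
// surface unexpected ledger access.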
type fakeLedger struct{}

func (*fakeLedger) GetState(_ tk.ID) ([]byte, error) {
	panic("unexpected call to fakeLedger.GetState")
}