diff --git a/hw/ip/aes/dv/env/seq_lib/aes_base_vseq.sv b/hw/ip/aes/dv/env/seq_lib/aes_base_vseq.sv index 7e136cc083534..81036b4704fdf 100644 --- a/hw/ip/aes/dv/env/seq_lib/aes_base_vseq.sv +++ b/hw/ip/aes/dv/env/seq_lib/aes_base_vseq.sv @@ -133,6 +133,7 @@ class aes_base_vseq extends cip_base_vseq #( if (ral.ctrl_shadowed.operation.get_mirrored_value() != operation) begin ral.ctrl_shadowed.operation.set(operation); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.operation.predict(operation)); end endtask // set_operation @@ -141,6 +142,7 @@ class aes_base_vseq extends cip_base_vseq #( if (ral.ctrl_shadowed.mode.get_mirrored_value() != mode) begin ral.ctrl_shadowed.mode.set(mode); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.mode.predict(mode)); end endtask @@ -149,6 +151,7 @@ class aes_base_vseq extends cip_base_vseq #( if (ral.ctrl_shadowed.key_len.get_mirrored_value() != key_len) begin ral.ctrl_shadowed.key_len.set(key_len); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.key_len.predict(key_len)); end endtask // set_key_len @@ -157,6 +160,7 @@ class aes_base_vseq extends cip_base_vseq #( if (ral.ctrl_shadowed.sideload.get_mirrored_value() != sideload) begin ral.ctrl_shadowed.sideload.set(sideload); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.sideload.predict(sideload)); end endtask @@ -165,6 +169,7 @@ class aes_base_vseq extends cip_base_vseq #( if (ral.ctrl_shadowed.prng_reseed_rate.get_mirrored_value() != reseed_rate) begin ral.ctrl_shadowed.prng_reseed_rate.set(reseed_rate); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.prng_reseed_rate.predict(reseed_rate)); end endtask @@ -173,6 +178,7 @@ class aes_base_vseq extends cip_base_vseq #( if (ral.ctrl_shadowed.manual_operation.get_mirrored_value() != 
manual_operation) begin ral.ctrl_shadowed.manual_operation.set(manual_operation); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.manual_operation.predict(manual_operation)); end endtask @@ -209,6 +215,8 @@ class aes_base_vseq extends cip_base_vseq #( ral.ctrl_gcm_shadowed.phase.set(phase); ral.ctrl_gcm_shadowed.num_valid_bytes.set(num_bytes); csr_update(.csr(ral.ctrl_gcm_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_gcm_shadowed.phase.predict(phase)); + void'(ral.ctrl_gcm_shadowed.num_valid_bytes.predict(num_bytes)); endtask virtual task add_data(ref bit [3:0] [31:0] data, bit do_b2b); @@ -1029,6 +1037,11 @@ class aes_base_vseq extends cip_base_vseq #( csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(is_blocking)); end + // Read the main control register. This will update the mirrored values thereby getting them + // back in sync with the DUT (updated via csr_update() above) and the predicted values (updated + // via set() above). 
+ csr_rd(.ptr(ral.ctrl_shadowed), .value(ctrl), .backdoor(1)); + if (cfg_item.mode == AES_GCM && !status.alert_fatal_fault) begin // As we are splitting the message, we also need to recalculate the length // of the AAD and PTX -> len(aad) || len(data) that is stored in a AES_GCM_TAG diff --git a/hw/ip/aes/dv/env/seq_lib/aes_nist_vectors_gcm_vseq.sv b/hw/ip/aes/dv/env/seq_lib/aes_nist_vectors_gcm_vseq.sv index 87b95b0590cb6..81c7f92c5462f 100644 --- a/hw/ip/aes/dv/env/seq_lib/aes_nist_vectors_gcm_vseq.sv +++ b/hw/ip/aes/dv/env/seq_lib/aes_nist_vectors_gcm_vseq.sv @@ -64,6 +64,10 @@ class aes_nist_vectors_gcm_vseq extends aes_base_vseq; ral.ctrl_shadowed.mode.set(nist_vectors[i].mode); ral.ctrl_shadowed.prng_reseed_rate.set(PER_8K); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.operation.predict(AES_ENC)); + void'(ral.ctrl_shadowed.key_len.predict(nist_vectors[i].key_len)); + void'(ral.ctrl_shadowed.mode.predict(nist_vectors[i].mode)); + void'(ral.ctrl_shadowed.prng_reseed_rate.predict(PER_8K)); // Put AES-GCM into init phase. cov_if.cg_ctrl_gcm_reg_sample(GCM_INIT); @@ -191,6 +195,9 @@ class aes_nist_vectors_gcm_vseq extends aes_base_vseq; ral.ctrl_shadowed.key_len.set(nist_vectors[i].key_len); ral.ctrl_shadowed.mode.set(nist_vectors[i].mode); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.operation.predict(AES_DEC)); + void'(ral.ctrl_shadowed.key_len.predict(nist_vectors[i].key_len)); + void'(ral.ctrl_shadowed.mode.predict(nist_vectors[i].mode)); // Put AES-GCM into init phase. 
cov_if.cg_ctrl_gcm_reg_sample(GCM_INIT); diff --git a/hw/ip/aes/dv/env/seq_lib/aes_nist_vectors_vseq.sv b/hw/ip/aes/dv/env/seq_lib/aes_nist_vectors_vseq.sv index a8e117dd0e5ec..55aee16377947 100644 --- a/hw/ip/aes/dv/env/seq_lib/aes_nist_vectors_vseq.sv +++ b/hw/ip/aes/dv/env/seq_lib/aes_nist_vectors_vseq.sv @@ -46,6 +46,9 @@ class aes_nist_vectors_vseq extends aes_base_vseq; ral.ctrl_shadowed.key_len.set(nist_vectors[i].key_len); ral.ctrl_shadowed.mode.set(nist_vectors[i].mode); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.operation.predict(AES_ENC)); + void'(ral.ctrl_shadowed.key_len.predict(nist_vectors[i].key_len)); + void'(ral.ctrl_shadowed.mode.predict(nist_vectors[i].mode)); // transpose key To match NIST format ( little endian) init_key = '{ {<<8{nist_vectors[i].key}} , 256'h0 }; write_key(init_key, do_b2b); @@ -84,6 +87,9 @@ class aes_nist_vectors_vseq extends aes_base_vseq; ral.ctrl_shadowed.key_len.set(nist_vectors[i].key_len); ral.ctrl_shadowed.mode.set(nist_vectors[i].mode); csr_update(.csr(ral.ctrl_shadowed), .en_shadow_wr(1'b1), .blocking(1)); + void'(ral.ctrl_shadowed.operation.predict(AES_DEC)); + void'(ral.ctrl_shadowed.key_len.predict(nist_vectors[i].key_len)); + void'(ral.ctrl_shadowed.mode.predict(nist_vectors[i].mode)); // transpose key To match NIST format ( little endian) init_key = '{ {<<8{nist_vectors[i].key}} , 256'h0 };