// Copyright 2023-2025 Irreducible Inc.
// Copyright (c) 2019-2023 RustCrypto Developers
//! ARMv8 `PMULL`-accelerated implementation of GHASH.
//!
//! Based on the optimized GHASH implementation using carryless multiplication
//! instructions available on ARMv8 processors with NEON support.
use core::arch::aarch64::*;
use super::m128::M128;
use crate::{
BinaryField128bGhash,
arch::{
portable::packed_macros::{portable_macros::*, *},
shared::ghash::ClMulUnderlier,
},
arithmetic_traits::{
InvertOrZero, TaggedInvertOrZero, TaggedMul, TaggedSquare, impl_invert_with, impl_mul_with,
impl_square_with,
},
};
impl ClMulUnderlier for M128 {
	/// Carryless 64×64→128-bit multiplication of selected 64-bit halves.
	///
	/// `IMM8` uses the same selector encoding as x86 `PCLMULQDQ`
	/// (`_mm_clmulepi64_si128`): bit 0 picks the lane of `a`, bit 4 picks the
	/// lane of `b`. Only the four canonical selectors are supported.
	///
	/// # Panics
	/// Panics if `IMM8` is not one of `0x00`, `0x01`, `0x10`, `0x11`.
	#[inline]
	fn clmulepi64<const IMM8: i32>(a: Self, b: Self) -> Self {
		let a_u64x2: uint64x2_t = a.into();
		let b_u64x2: uint64x2_t = b.into();
		// NOTE(review): `vmull_p64` requires the `aes` target feature;
		// presumably this arch-specific module is only compiled with it
		// enabled — confirm against the crate's build configuration.
		// SAFETY: lane indices are the constants 0 and 1, both in bounds for
		// a two-lane `uint64x2_t`.
		let result = match IMM8 {
			0x00 => unsafe { vmull_p64(vgetq_lane_u64(a_u64x2, 0), vgetq_lane_u64(b_u64x2, 0)) },
			0x11 => unsafe { vmull_p64(vgetq_lane_u64(a_u64x2, 1), vgetq_lane_u64(b_u64x2, 1)) },
			0x10 => unsafe { vmull_p64(vgetq_lane_u64(a_u64x2, 0), vgetq_lane_u64(b_u64x2, 1)) },
			0x01 => unsafe { vmull_p64(vgetq_lane_u64(a_u64x2, 1), vgetq_lane_u64(b_u64x2, 0)) },
			_ => panic!("Unsupported IMM8 value for clmulepi64"),
		};
		// SAFETY: `u128` and `uint64x2_t` are both 16 bytes and have no
		// invalid bit patterns, so the transmute is sound. Use `core::mem`
		// rather than `std::mem` to stay consistent with the `core::arch`
		// import above and keep the module `no_std`-compatible.
		unsafe { core::mem::transmute::<u128, uint64x2_t>(result) }.into()
	}

	/// Moves the low 64 bits of `a` into the high 64 bits, zeroing the low
	/// half (the aarch64 analogue of x86 `_mm_slli_si128(a, 8)`).
	#[inline]
	fn move_64_to_hi(a: Self) -> Self {
		let a_bytes: uint8x16_t = a.into();
		// Shift left by 8 bytes: `vextq_u8::<8>(zero, a)` takes bytes 8..16
		// of `zero` (all zero) followed by bytes 0..8 of `a`, which on
		// little-endian aarch64 places the low 64 bits in the high half.
		// SAFETY: `vdupq_n_u8`/`vextq_u8` are baseline NEON intrinsics,
		// always available on aarch64; the extract index 8 is within the
		// valid 0..=15 range.
		unsafe {
			let zero = vdupq_n_u8(0);
			vextq_u8::<8>(zero, a_bytes).into()
		}
	}
}
/// Strategy for aarch64 GHASH field arithmetic operations.
///
/// Zero-sized marker type used only as a tag to select the `TaggedMul`,
/// `TaggedSquare`, and `TaggedInvertOrZero` implementations below.
pub struct GhashStrategy;
// Define PackedBinaryGhash1x128b using the macro.
// NOTE(review): the three `(GhashStrategy)` arguments presumably select the
// mul / square / invert strategies in that order, matching the tagged impls
// below, and `(None)` opts out of a fourth (transformation?) strategy —
// confirm against the `define_packed_binary_field!` macro definition.
define_packed_binary_field!(
	PackedBinaryGhash1x128b,
	BinaryField128bGhash,
	M128,
	(GhashStrategy),
	(GhashStrategy),
	(GhashStrategy),
	(None)
);
// Implement TaggedMul for GhashStrategy
impl TaggedMul<GhashStrategy> for PackedBinaryGhash1x128b {
	/// Field multiplication, delegating to the shared carryless-multiply
	/// GHASH kernel on the raw underlier values.
	#[inline]
	fn mul(self, rhs: Self) -> Self {
		let lhs_raw = self.to_underlier();
		let rhs_raw = rhs.to_underlier();
		let product = crate::arch::shared::ghash::mul_clmul(lhs_raw, rhs_raw);
		Self::from_underlier(product)
	}
}
// Implement TaggedSquare for GhashStrategy
impl TaggedSquare<GhashStrategy> for PackedBinaryGhash1x128b {
	/// Field squaring, delegating to the shared clmul-based squaring kernel
	/// on the raw underlier value.
	#[inline]
	fn square(self) -> Self {
		let raw = self.to_underlier();
		let squared = crate::arch::shared::ghash::square_clmul(raw);
		Self::from_underlier(squared)
	}
}
// Implement TaggedInvertOrZero for GhashStrategy (uses portable fallback)
impl TaggedInvertOrZero<GhashStrategy> for PackedBinaryGhash1x128b {
	/// Inversion (mapping zero to zero) via the portable implementation:
	/// round-trips the element through its `u128` representation, since no
	/// accelerated aarch64 inversion path is provided here.
	fn invert_or_zero(self) -> Self {
		type Portable = super::super::portable::packed_ghash_128::PackedBinaryGhash1x128b;
		let as_u128 = u128::from(self.to_underlier());
		let inverted = InvertOrZero::invert_or_zero(Portable::from(as_u128));
		Self::from_underlier(inverted.to_underlier().into())
	}
}