@@ -18,10 +18,10 @@ use byteorder::{LittleEndian, ByteOrder};
 use rlp::Encodable;
 use core::ops::BitXor;
 
-pub const DATASET_BYTES_INIT: usize = 1073741824; // 2 to the power of 30.
-pub const DATASET_BYTES_GROWTH: usize = 8388608; // 2 to the power of 23.
-pub const CACHE_BYTES_INIT: usize = 16777216; // 2 to the power of 24.
-pub const CACHE_BYTES_GROWTH: usize = 131072; // 2 to the power of 17.
+pub const DATASET_BYTES_INIT: u64 = 1073741824; // 2 to the power of 30.
+pub const DATASET_BYTES_GROWTH: u64 = 8388608; // 2 to the power of 23.
+pub const CACHE_BYTES_INIT: u64 = 16777216; // 2 to the power of 24.
+pub const CACHE_BYTES_GROWTH: u64 = 131072; // 2 to the power of 17.
 pub const CACHE_MULTIPLIER: usize = 1024;
 pub const MIX_BYTES: usize = 128;
 pub const WORD_BYTES: usize = 4;
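Note on the widening above: the dataset-size formula DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * epoch crosses 2^32 around epoch 384, so keeping these constants as usize would overflow on 32-bit targets where usize is 32 bits. A minimal sanity check of that arithmetic, using only the constants from this hunk (the epoch value is illustrative):

    // Standalone sketch: shows the dataset-size formula needs more than 32 bits.
    fn main() {
        let epoch: u64 = 384;
        // DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * epoch
        let sz: u64 = 1073741824 + 8388608 * epoch;
        assert!(sz > u32::MAX as u64); // 4294967296 exceeds a 32-bit usize
    }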
@@ -32,20 +32,22 @@ pub const ACCESSES: usize = 64;
 
 /// Get the cache size required given the block number.
 pub fn get_cache_size(epoch: usize) -> usize {
-    let mut sz = CACHE_BYTES_INIT + CACHE_BYTES_GROWTH * epoch;
-    sz -= HASH_BYTES;
-    while !is_prime(sz / HASH_BYTES) {
-        sz -= 2 * HASH_BYTES;
+    let mut sz = CACHE_BYTES_INIT + CACHE_BYTES_GROWTH * (epoch as u64);
+    let hash_bytes_64 = HASH_BYTES as u64;
+    sz -= hash_bytes_64;
+    while !is_prime(sz / hash_bytes_64) {
+        sz -= 2 * hash_bytes_64;
     }
-    sz
+    sz as usize
 }
 
 /// Get the full dataset size given the block number.
-pub fn get_full_size(epoch: usize) -> usize {
-    let mut sz = DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * epoch;
-    sz -= MIX_BYTES;
-    while !is_prime(sz / MIX_BYTES) {
-        sz -= 2 * MIX_BYTES
+pub fn get_full_size(epoch: usize) -> u64 {
+    let mut sz = DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * (epoch as u64);
+    let mix_bytes_64 = MIX_BYTES as u64;
+    sz -= mix_bytes_64;
+    while !is_prime(sz / mix_bytes_64) {
+        sz -= 2 * mix_bytes_64
     }
     sz
 }
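For reference, a caller-side sketch of how the two size functions above combine after this change; it assumes get_cache_size and get_full_size from this file are in scope, and the epoch value is arbitrary:

    #[test]
    fn sizes_for_epoch_zero() {
        let cache_size: usize = get_cache_size(0); // still usize: the cache stays small
        let full_size: u64 = get_full_size(0);     // now u64: the dataset keeps growing
        assert_eq!(cache_size % 64, 0);            // stays a multiple of HASH_BYTES
        assert_eq!(full_size % 128, 0);            // stays a multiple of MIX_BYTES
        assert!(full_size > cache_size as u64);
    }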
@@ -179,7 +181,7 @@ pub fn make_dataset(dataset: &mut [u8], cache: &[u8]) {
 /// "Main" function of Ethash, calculating the mix digest and result given the
 /// header and nonce.
 pub fn hashimoto<F: Fn(usize) -> H512>(
-    header_hash: H256, nonce: H64, full_size: usize, lookup: F
+    header_hash: H256, nonce: H64, full_size: u64, lookup: F
 ) -> (H256, H256) {
     hashimoto_with_hasher(
         header_hash,
@@ -204,11 +206,12 @@ pub fn hashimoto<F: Fn(usize) -> H512>(
 }
 
 pub fn hashimoto_with_hasher<F: Fn(usize) -> H512, HF256: Fn(&[u8]) -> [u8; 32], HF512: Fn(&[u8]) -> [u8; 64]>(
-    header_hash: H256, nonce: H64, full_size: usize, lookup: F, hasher256: HF256, hasher512: HF512
+    header_hash: H256, nonce: H64, full_size: u64, lookup: F, hasher256: HF256, hasher512: HF512
 ) -> (H256, H256) {
-    let n = full_size / HASH_BYTES;
+    let n = full_size / (HASH_BYTES as u64);
     let w = MIX_BYTES / WORD_BYTES;
     const MIXHASHES: usize = MIX_BYTES / HASH_BYTES;
+    const MIXHASHES_64: u64 = MIXHASHES as u64;
     let s = {
         let mut data = [0u8; 40];
         data[..32].copy_from_slice(&header_hash.0);
@@ -224,9 +227,9 @@ pub fn hashimoto_with_hasher<F: Fn(usize) -> H512, HF256: Fn(&[u8]) -> [u8; 32],
     }
 
     for i in 0..ACCESSES {
-        let p = (fnv((i as u32).bitxor(LittleEndian::read_u32(s.as_ref())),
+        let p = ((fnv((i as u32).bitxor(LittleEndian::read_u32(s.as_ref())),
                      LittleEndian::read_u32(&mix[(i % w * 4)..]))
-                 as usize) % (n / MIXHASHES) * MIXHASHES;
+                 as u64) % (n / MIXHASHES_64) * MIXHASHES_64) as usize;
         let mut newdata = [0u8; MIX_BYTES];
         for j in 0..MIXHASHES {
             let v = lookup(p + j);
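A note on the hunk above: the fnv output is widened to u64 before the % (n / MIXHASHES_64) * MIXHASHES_64 reduction, and only the final page index, which is bounded by n and feeds the usize-taking lookup closure, is narrowed back. An isolated sketch of just that reduction, with names local to the snippet (mix_value stands in for the fnv result):

    // n is full_size / HASH_BYTES; MIXHASHES is MIX_BYTES / HASH_BYTES = 2.
    fn page_index(mix_value: u32, n: u64) -> usize {
        const MIXHASHES_64: u64 = 2;
        (((mix_value as u64) % (n / MIXHASHES_64)) * MIXHASHES_64) as usize
    }

Since % and * share precedence and associate left in Rust, the expression in the diff reduces the same way as the fully parenthesized form here.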
@@ -258,7 +261,7 @@ pub fn hashimoto_with_hasher<F: Fn(usize) -> H512, HF256: Fn(&[u8]) -> [u8; 32],
 /// Ethash used by a light client. Only stores the 16MB cache rather than the
 /// full dataset.
 pub fn hashimoto_light(
-    header_hash: H256, nonce: H64, full_size: usize, cache: &[u8]
+    header_hash: H256, nonce: H64, full_size: u64, cache: &[u8]
 ) -> (H256, H256) {
     hashimoto(header_hash, nonce, full_size, |i| {
         calc_dataset_item(cache, i)
@@ -267,7 +270,7 @@ pub fn hashimoto_light(
 
 /// Ethash used by a full client. Stores the whole dataset in memory.
 pub fn hashimoto_full(
-    header_hash: H256, nonce: H64, full_size: usize, dataset: &[u8]
+    header_hash: H256, nonce: H64, full_size: u64, dataset: &[u8]
 ) -> (H256, H256) {
     hashimoto(header_hash, nonce, full_size, |i| {
         let mut r = [0u8; 64];
@@ -290,7 +293,7 @@ pub fn cross_boundary(val: U256) -> U256 {
 /// Mine a nonce given the header, dataset, and the target. Target is derived
 /// from the difficulty.
 pub fn mine<T: Encodable>(
-    header: &T, full_size: usize, dataset: &[u8], nonce_start: H64, difficulty: U256
+    header: &T, full_size: u64, dataset: &[u8], nonce_start: H64, difficulty: U256
 ) -> (H64, H256) {
     let target = cross_boundary(difficulty);
     let header = rlp::encode(header).to_vec();
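End-to-end, the widened full_size flows from get_full_size into the verification entry points unchanged. A small usage sketch, assuming the items touched in this diff are in scope and that the cache and dataset were built for the same epoch elsewhere in the crate:

    // Light verification: only the cache is needed.
    fn verify_light(header_hash: H256, nonce: H64, epoch: usize, cache: &[u8]) -> (H256, H256) {
        hashimoto_light(header_hash, nonce, get_full_size(epoch), cache)
    }

    // Full verification: the whole dataset is in memory.
    fn verify_full(header_hash: H256, nonce: H64, epoch: usize, dataset: &[u8]) -> (H256, H256) {
        hashimoto_full(header_hash, nonce, get_full_size(epoch), dataset)
    }

Both return the (mix digest, result) pair; the result is what gets checked against the boundary that cross_boundary(difficulty) yields, the same target mine computes above.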