@@ -10,7 +10,7 @@
 import stat
 import subprocess
 import tempfile
-
+from typing import Any, Callable
 from portage import _encodings, _unicode_decode, _unicode_encode
 from portage import os
 from portage.const import HASHING_BLOCKSIZE, PRELINK_BINARY
@@ -34,11 +34,11 @@
 
 
 # Dict of all available hash functions
-hashfunc_map = {}
+hashfunc_map: dict[str, Any] = {}
 hashorigin_map = {}
 
 
-def _open_file(filename):
+def _open_file(filename: str | bytes):
     try:
         return open(
             _unicode_encode(filename, encoding=_encodings["fs"], errors="strict"), "rb"
@@ -58,7 +58,7 @@ def _open_file(filename):
 class _generate_hash_function:
     __slots__ = ("_hashobject",)
 
-    def __init__(self, hashtype, hashobject, origin="unknown"):
+    def __init__(self, hashtype, hashobject, origin: str = "unknown"):
         self._hashobject = hashobject
         hashfunc_map[hashtype] = self
         hashorigin_map[hashtype] = origin
@@ -75,7 +75,7 @@ def checksum_str(self, data):
         checksum.update(data)
         return checksum.hexdigest()
 
-    def checksum_file(self, filename):
+    def checksum_file(self, filename: str) -> tuple[Any, int]:
         """
         Run a checksum against a file.
 
@@ -177,15 +177,15 @@ def checksum_file(self, filename):
 
 # There is only one implementation for size
 class SizeHash:
-    def checksum_file(self, filename):
+    def checksum_file(self, filename: str) -> tuple[int, int]:
         size = os.stat(filename).st_size
         return (size, size)
 
 
 hashfunc_map["size"] = SizeHash()
 
 # cache all supported hash methods in a frozenset
-hashfunc_keys = frozenset(hashfunc_map)
+hashfunc_keys: frozenset[str] = frozenset(hashfunc_map)
 
 # end actual hash functions
 
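Note on the hashfunc_keys annotation: frozenset is not a subclass of set, so
annotating a frozenset value as set[str] is rejected by type checkers;
frozenset[str] is the matching generic. A minimal illustration (hypothetical
names, not part of this change):

    keys: frozenset[str] = frozenset({"MD5", "SHA512"})  # accepted
    bad: set[str] = frozenset({"MD5"})  # mypy: incompatible assignment
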
@@ -202,7 +202,7 @@ def checksum_file(self, filename):
     del cmd, proc, status
 
 
-def is_prelinkable_elf(filename):
+def is_prelinkable_elf(filename: bytes) -> bool:
    with _open_file(filename) as f:
        magic = f.read(17)
    return (
@@ -212,26 +212,26 @@ def is_prelinkable_elf(filename):
     )  # 2=ET_EXEC, 3=ET_DYN
 
 
-def perform_md5(x, calc_prelink=0):
+def perform_md5(x: str, calc_prelink: int = 0) -> bytes:
     return perform_checksum(x, "MD5", calc_prelink)[0]
 
 
-def _perform_md5_merge(x, **kwargs):
+def _perform_md5_merge(x: str, **kwargs) -> bytes:
     return perform_md5(
         _unicode_encode(x, encoding=_encodings["merge"], errors="strict"), **kwargs
     )
 
 
-def perform_all(x, calc_prelink=0):
+def perform_all(x: str, calc_prelink: int = 0) -> dict[str, bytes]:
     mydict = {k: perform_checksum(x, k, calc_prelink)[0] for k in hashfunc_keys}
     return mydict
 
 
-def get_valid_checksum_keys():
+def get_valid_checksum_keys() -> frozenset[str]:
     return hashfunc_keys
 
 
-def get_hash_origin(hashtype):
+def get_hash_origin(hashtype: str):
     if hashtype not in hashfunc_keys:
         raise KeyError(hashtype)
     return hashorigin_map.get(hashtype, "unknown")
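A usage sketch of the module-level helpers annotated above (the file path is
an arbitrary placeholder, and the import assumes a working portage install):

    from portage.checksum import get_valid_checksum_keys, perform_all, perform_md5

    path = "/etc/os-release"           # any readable file
    print(get_valid_checksum_keys())   # frozenset of supported hash names
    print(perform_md5(path))           # digest of the file contents
    print(perform_all(path))           # one digest per supported hash name
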
@@ -266,15 +266,15 @@ class _hash_filter:
         "_tokens",
     )
 
-    def __init__(self, filter_str):
+    def __init__(self, filter_str: str):
         tokens = filter_str.upper().split()
         if not tokens or tokens[-1] == "*":
             del tokens[:]
         self.transparent = not tokens
         tokens.reverse()
         self._tokens = tuple(tokens)
 
-    def __call__(self, hash_name):
+    def __call__(self, hash_name: str) -> bool:
         if self.transparent:
             return True
         matches = ("*", hash_name)
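A short sketch of _hash_filter in use, assuming the usual
PORTAGE_CHECKSUM_FILTER token semantics (the last-specified token takes
precedence and a leading "-" excludes a hash; the matching loop itself falls
outside this hunk):

    f = _hash_filter("* -MD5")
    f("SHA512")              # True: falls through to the "*" wildcard
    f("MD5")                 # False: "-MD5" wins over the earlier "*"
    _hash_filter("")("MD5")  # True: an empty filter is transparent
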
@@ -286,7 +286,9 @@ def __call__(self, hash_name):
         return False
 
 
-def _apply_hash_filter(digests, hash_filter):
+def _apply_hash_filter(
+    digests: dict[str, str], hash_filter: Callable[[str], bool]
+) -> dict[str, str]:
     """
     Return a new dict containing the filtered digests, or the same
     dict if no changes are necessary. This will always preserve at
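Note on the Callable annotation here: the callable() builtin cannot be
parameterized, so the subscriptable form must come from typing (or
collections.abc), e.g.:

    from typing import Callable

    HashPredicate = Callable[[str], bool]  # hypothetical alias, for illustration
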
@@ -321,7 +323,12 @@ def _apply_hash_filter(digests, hash_filter):
     return digests
 
 
-def verify_all(filename, mydict, calc_prelink=0, strict=0):
+def verify_all(
+    filename: str | bytes,
+    mydict: dict[str, Any],
+    calc_prelink: int = 0,
+    strict: int = 0,
+):
     """
     Verify all checksums against a file.
 
@@ -388,7 +395,9 @@ def verify_all(filename, mydict, calc_prelink=0, strict=0):
     return file_is_ok, reason
 
 
-def perform_checksum(filename, hashname="MD5", calc_prelink=0):
+def perform_checksum(
+    filename: str, hashname: str = "MD5", calc_prelink: int = 0
+) -> tuple[bytes, int]:
     """
     Run a specific checksum against a file. The filename can
     be either unicode or an encoded byte string. If filename
@@ -450,7 +459,9 @@ def perform_checksum(filename, hashname="MD5", calc_prelink=0):
         del e
 
 
-def perform_multiple_checksums(filename, hashes=["MD5"], calc_prelink=0):
+def perform_multiple_checksums(
+    filename: str, hashes: list[str] = ["MD5"], calc_prelink: int = 0
+) -> dict[str, bytes]:
     """
     Run a group of checksums against a file.
 
@@ -475,7 +486,7 @@ def perform_multiple_checksums(filename, hashes=["MD5"], calc_prelink=0):
     return rVal
 
 
-def checksum_str(data, hashname="MD5"):
+def checksum_str(data: bytes, hashname: str = "MD5"):
     """
     Run a specific checksum against a byte string.
 
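Finally, a sketch of the round trip through the two annotated entry points,
perform_multiple_checksums() and verify_all() (hypothetical usage; the path
and hash list are placeholders):

    from portage.checksum import perform_multiple_checksums, verify_all

    path = "/etc/os-release"
    digests = perform_multiple_checksums(path, hashes=["MD5", "SHA512"])
    ok, reason = verify_all(path, digests)
    assert ok, reason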