
Commit dc12bd0

type hint: add type hint for checksum and xpak
1 parent 7c9bb2c commit dc12bd0

3 files changed, +63 −47 lines changed


Diff for: lib/portage/__init__.py (+4 −2)

@@ -226,13 +226,15 @@ def _decode_argv(argv):
     return [_unicode_decode(x.encode(fs_encoding, "surrogateescape")) for x in argv]


-def _unicode_encode(s, encoding=_encodings["content"], errors="backslashreplace"):
+def _unicode_encode(
+    s, encoding=_encodings["content"], errors="backslashreplace"
+) -> bytes:
     if isinstance(s, str):
         s = s.encode(encoding, errors)
     return s


-def _unicode_decode(s, encoding=_encodings["content"], errors="replace"):
+def _unicode_decode(s, encoding=_encodings["content"], errors="replace") -> str:
     if isinstance(s, bytes):
         s = str(s, encoding=encoding, errors=errors)
     return s
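
The new return annotations make the round-trip contract of these helpers explicit. A minimal usage sketch, assuming a working portage import (the sample string is arbitrary):

    from portage import _unicode_decode, _unicode_encode

    raw = _unicode_encode("média-libs/libpng")   # str input is encoded -> bytes
    text = _unicode_decode(raw)                  # bytes input is decoded -> str
    assert _unicode_encode(raw) is raw           # non-str input passes through unchanged
    assert _unicode_decode(text) is text         # non-bytes input passes through unchanged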

Diff for: lib/portage/checksum.py (+31 −20)

@@ -10,7 +10,7 @@
 import stat
 import subprocess
 import tempfile
-
+from typing import Any
 from portage import _encodings, _unicode_decode, _unicode_encode
 from portage import os
 from portage.const import HASHING_BLOCKSIZE, PRELINK_BINARY

@@ -34,11 +34,11 @@


 # Dict of all available hash functions
-hashfunc_map = {}
+hashfunc_map: dict[str, Any] = {}
 hashorigin_map = {}


-def _open_file(filename):
+def _open_file(filename: str | bytes):
     try:
         return open(
             _unicode_encode(filename, encoding=_encodings["fs"], errors="strict"), "rb"

@@ -58,7 +58,7 @@ def _open_file(filename):
 class _generate_hash_function:
     __slots__ = ("_hashobject",)

-    def __init__(self, hashtype, hashobject, origin="unknown"):
+    def __init__(self, hashtype, hashobject, origin: str = "unknown"):
         self._hashobject = hashobject
         hashfunc_map[hashtype] = self
         hashorigin_map[hashtype] = origin
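
For context on why hashfunc_map is now hinted as dict[str, Any]: instantiating _generate_hash_function registers the new object under its hash type, as the __init__ above shows. A hedged sketch with a hypothetical hash name ("DEMOHASH" is not a real portage hash type; hashlib.sha256 stands in for an arbitrary hash object):

    import hashlib

    from portage.checksum import _generate_hash_function, hashfunc_map, hashorigin_map

    # "DEMOHASH" is purely illustrative and only exists in this sketch
    demo = _generate_hash_function("DEMOHASH", hashlib.sha256, origin="hashlib")
    assert hashfunc_map["DEMOHASH"] is demo          # __init__ stores self in hashfunc_map
    assert hashorigin_map["DEMOHASH"] == "hashlib"   # and records where it came from
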
@@ -75,7 +75,7 @@ def checksum_str(self, data):
         checksum.update(data)
         return checksum.hexdigest()

-    def checksum_file(self, filename):
+    def checksum_file(self, filename: str) -> tuple[Any, int]:
         """
         Run a checksum against a file.

@@ -177,15 +177,15 @@ def checksum_file(self, filename):

 # There is only one implementation for size
 class SizeHash:
-    def checksum_file(self, filename):
+    def checksum_file(self, filename: str) -> tuple[int, int]:
         size = os.stat(filename).st_size
         return (size, size)


 hashfunc_map["size"] = SizeHash()

 # cache all supported hash methods in a frozenset
-hashfunc_keys = frozenset(hashfunc_map)
+hashfunc_keys: set[str] = frozenset(hashfunc_map)

 # end actual hash functions
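
Every value in hashfunc_map, including SizeHash, follows the same checksum_file(filename) -> (digest, size) shape, and hashfunc_keys is the frozen snapshot of the registered names. A small sketch using a temporary file:

    import tempfile

    from portage.checksum import hashfunc_keys, hashfunc_map

    with tempfile.NamedTemporaryFile() as tmp:
        tmp.write(b"hello")
        tmp.flush()
        digest, size = hashfunc_map["size"].checksum_file(tmp.name)
        assert (digest, size) == (5, 5)   # SizeHash reports the byte size twice
    assert "size" in hashfunc_keys        # cached at import time from hashfunc_map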

@@ -202,7 +202,7 @@ def checksum_file(self, filename):
     del cmd, proc, status


-def is_prelinkable_elf(filename):
+def is_prelinkable_elf(filename: bytes) -> bool:
     with _open_file(filename) as f:
         magic = f.read(17)
     return (

@@ -212,26 +212,26 @@ def is_prelinkable_elf(filename):
     ) # 2=ET_EXEC, 3=ET_DYN


-def perform_md5(x, calc_prelink=0):
+def perform_md5(x: str, calc_prelink: int = 0) -> tuple[bytes, int]:
     return perform_checksum(x, "MD5", calc_prelink)[0]


-def _perform_md5_merge(x, **kwargs):
+def _perform_md5_merge(x: str, **kwargs) -> bytes:
     return perform_md5(
         _unicode_encode(x, encoding=_encodings["merge"], errors="strict"), **kwargs
     )


-def perform_all(x, calc_prelink=0):
+def perform_all(x: str, calc_prelink: int = 0) -> dict[str, bytes]:
     mydict = {k: perform_checksum(x, k, calc_prelink)[0] for k in hashfunc_keys}
     return mydict


-def get_valid_checksum_keys():
+def get_valid_checksum_keys() -> set[str]:
     return hashfunc_keys


-def get_hash_origin(hashtype):
+def get_hash_origin(hashtype: str):
     if hashtype not in hashfunc_keys:
         raise KeyError(hashtype)
     return hashorigin_map.get(hashtype, "unknown")
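
A hedged usage sketch for the module-level helpers re-hinted in this hunk; the path is a placeholder, and the exact set of valid keys depends on which hash backends registered at import time:

    from portage.checksum import (
        get_hash_origin,
        get_valid_checksum_keys,
        perform_all,
        perform_md5,
    )

    path = "/var/cache/distfiles/example-1.0.tar.gz"   # hypothetical file
    md5_digest = perform_md5(path)            # perform_checksum(path, "MD5", ...)[0]
    digests = perform_all(path)               # one digest per name in hashfunc_keys
    assert set(digests) == set(get_valid_checksum_keys())
    origin = get_hash_origin("MD5")           # e.g. "hashlib", or "unknown" if untracked
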
@@ -266,15 +266,15 @@ class _hash_filter:
         "_tokens",
     )

-    def __init__(self, filter_str):
+    def __init__(self, filter_str: str):
         tokens = filter_str.upper().split()
         if not tokens or tokens[-1] == "*":
             del tokens[:]
         self.transparent = not tokens
         tokens.reverse()
         self._tokens = tuple(tokens)

-    def __call__(self, hash_name):
+    def __call__(self, hash_name: str) -> bool:
         if self.transparent:
             return True
         matches = ("*", hash_name)

@@ -286,7 +286,9 @@ def __call__(self, hash_name):
         return False


-def _apply_hash_filter(digests, hash_filter):
+def _apply_hash_filter(
+    digests: dict[str, str], hash_filter: callable[[str], bool]
+) -> dict[str, str]:
     """
     Return a new dict containing the filtered digests, or the same
     dict if no changes are necessary. This will always preserve at
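
_hash_filter parses a whitespace-separated filter string (typically fed from PORTAGE_CHECKSUM_FILTER) and _apply_hash_filter uses it to prune a digest dict. A minimal sketch of the transparent case, which is fully covered by the code shown above (digest values are illustrative):

    from portage.checksum import _apply_hash_filter, _hash_filter

    digests = {"SHA512": "...", "BLAKE2B": "...", "size": 4096}   # illustrative values
    keep_all = _hash_filter("*")       # a lone "*" empties the token list -> transparent
    assert keep_all("SHA512") is True  # transparent filters accept every hash name
    assert _apply_hash_filter(digests, keep_all) == digests
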
@@ -321,7 +323,12 @@ def _apply_hash_filter(digests, hash_filter):
     return digests


-def verify_all(filename, mydict, calc_prelink=0, strict=0):
+def verify_all(
+    filename: str | bytes,
+    mydict: dict[str, any],
+    calc_prelink: int = 0,
+    strict: int = 0,
+):
     """
     Verify all checksums against a file.

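
verify_all returns a (file_is_ok, reason) pair. A hedged sketch that feeds it digests computed from the same file; the path is a placeholder, and the "size" entry is an assumption mirroring how Manifest-style digest dicts usually look:

    import os

    from portage.checksum import perform_multiple_checksums, verify_all

    path = "/var/cache/distfiles/example-1.0.tar.gz"   # hypothetical file
    expected = perform_multiple_checksums(path, hashes=["SHA512", "BLAKE2B"])
    expected["size"] = os.stat(path).st_size           # assumed Manifest-style size entry
    ok, reason = verify_all(path, expected)            # (file_is_ok, reason)
    # ok should be True here, since the digests were just computed from the same file
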
@@ -388,7 +395,9 @@ def verify_all(filename, mydict, calc_prelink=0, strict=0):
     return file_is_ok, reason


-def perform_checksum(filename, hashname="MD5", calc_prelink=0):
+def perform_checksum(
+    filename: str, hashname: str = "MD5", calc_prelink: int = 0
+) -> tuple[bytes, int]:
     """
     Run a specific checksum against a file. The filename can
     be either unicode or an encoded byte string. If filename
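
perform_checksum is the primitive that the wrappers shown earlier build on; it returns a (digest, size) pair for a single hash. A minimal sketch with a placeholder path:

    from portage.checksum import perform_checksum

    path = "/var/cache/distfiles/example-1.0.tar.gz"   # hypothetical file
    digest, size = perform_checksum(path, hashname="SHA512")
    # digest is the SHA512 checksum of the file, size is the number of bytes hashed
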
@@ -450,7 +459,9 @@ def perform_checksum(filename, hashname="MD5", calc_prelink=0):
         del e


-def perform_multiple_checksums(filename, hashes=["MD5"], calc_prelink=0):
+def perform_multiple_checksums(
+    filename: str, hashes: list[str] = ["MD5"], calc_prelink: int = 0
+) -> dict[str, bytes]:
     """
     Run a group of checksums against a file.

@@ -475,7 +486,7 @@ def perform_multiple_checksums(filename, hashes=["MD5"], calc_prelink=0):
     return rVal


-def checksum_str(data, hashname="MD5"):
+def checksum_str(data: bytes, hashname: str = "MD5"):
     """
     Run a specific checksum against a byte string.
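
perform_multiple_checksums collects several digests in one call, while checksum_str hashes an in-memory byte string rather than a file. A hedged sketch (path and data are placeholders):

    from portage.checksum import checksum_str, perform_multiple_checksums

    path = "/var/cache/distfiles/example-1.0.tar.gz"   # hypothetical file
    digests = perform_multiple_checksums(path, hashes=["SHA512", "BLAKE2B"])
    # digests maps each requested hash name to that file's digest

    inline = checksum_str(b"some in-memory data", hashname="SHA512")
    # same hash backends, applied to a byte string instead of a file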
