1818from __future__ import annotations
1919
2020import json
21- from typing import TYPE_CHECKING , Any , Final
21+ from typing import TYPE_CHECKING , Any
2222
2323import aiofiles
2424import aiofiles .os
25- import blake3
2625import pydantic
2726
27+ import atr .hashes as hashes
2828import atr .log as log
2929import atr .models .attestable as models
3030import atr .util as util
31+ from atr .models .attestable import AttestableChecksV1
3132
3233if TYPE_CHECKING :
3334 import pathlib
3435
35- _HASH_CHUNK_SIZE : Final [int ] = 4 * 1024 * 1024
36-
3736
def attestable_path(project_name: str, version_name: str, revision_number: str) -> pathlib.Path:
    """Return the path of the attestable JSON file for one release revision."""
    revision_dir = util.get_attestable_dir() / project_name / version_name
    return revision_dir / f"{revision_number}.json"
@@ -43,18 +42,14 @@ def attestable_paths_path(project_name: str, version_name: str, revision_number:
4342 return util .get_attestable_dir () / project_name / version_name / f"{ revision_number } .paths.json"
4443
4544
async def compute_file_hash(path: pathlib.Path) -> str:
    """Hash the file at *path* with BLAKE3 and return a "blake3:"-prefixed hex digest.

    The file is streamed in fixed-size chunks so arbitrarily large files can be
    hashed without loading them fully into memory.
    """
    digest = blake3.blake3()
    async with aiofiles.open(path, "rb") as fp:
        chunk = await fp.read(_HASH_CHUNK_SIZE)
        while chunk:
            digest.update(chunk)
            chunk = await fp.read(_HASH_CHUNK_SIZE)
    return f"blake3:{digest.hexdigest()}"
52-
53-
def github_tp_payload_path(project_name: str, version_name: str, revision_number: str) -> pathlib.Path:
    """Return the path of the GitHub trusted-publishing payload file for one revision."""
    revision_dir = util.get_attestable_dir() / project_name / version_name
    return revision_dir / f"{revision_number}.github-tp.json"
5647
5748
def attestable_checks_path(project_name: str, version_name: str, revision_number: str) -> pathlib.Path:
    """Return the path of the checks JSON file for one release revision."""
    revision_dir = util.get_attestable_dir() / project_name / version_name
    return revision_dir / f"{revision_number}.checks.json"
51+
52+
5853async def github_tp_payload_write (
5954 project_name : str , version_name : str , revision_number : str , github_payload : dict [str , Any ]
6055) -> None :
@@ -99,6 +94,22 @@ async def load_paths(
9994 return None
10095
10196
async def load_checks(
    project_name: str,
    version_name: str,
    revision_number: str,
) -> list[int] | None:
    """Load the recorded check IDs for a revision.

    Returns None when no checks file exists for the revision, and an empty
    list when the file exists but cannot be parsed (the parse failure is
    logged as a warning rather than raised).
    """
    file_path = attestable_checks_path(project_name, version_name, revision_number)
    if not await aiofiles.os.path.isfile(file_path):
        return None
    try:
        async with aiofiles.open(file_path, encoding="utf-8") as f:
            content = await f.read()
        parsed = models.AttestableChecksV1.model_validate(json.loads(content))
    except (json.JSONDecodeError, pydantic.ValidationError) as e:
        log.warning(f"Could not parse {file_path}: {e}")
        return []
    return parsed.checks
111+
112+
102113def migrate_to_paths_files () -> int :
103114 attestable_dir = util .get_attestable_dir ()
104115 if not attestable_dir .is_dir ():
@@ -140,26 +151,52 @@ async def paths_to_hashes_and_sizes(directory: pathlib.Path) -> tuple[dict[str,
140151 if "\\ " in path_key :
141152 # TODO: We should centralise this, and forbid some other characters too
142153 raise ValueError (f"Backslash in path is forbidden: { path_key } " )
143- path_to_hash [path_key ] = await compute_file_hash (full_path )
154+ path_to_hash [path_key ] = await hashes . compute_file_hash (full_path )
144155 path_to_size [path_key ] = (await aiofiles .os .stat (full_path )).st_size
145156 return path_to_hash , path_to_size
146157
147158
async def write_files_data(
    project_name: str,
    version_name: str,
    revision_number: str,
    release_policy: dict[str, Any] | None,
    uploader_uid: str,
    previous: models.AttestableV1 | None,
    path_to_hash: dict[str, str],
    path_to_size: dict[str, int],
) -> None:
    """Write the attestable, paths, and (if absent) checks files for a revision.

    Generates the attestable data from the supplied hashes, sizes, and policy,
    then persists three JSON files under the attestable directory: the full
    attestable file, the paths-only file, and an empty checks file seeded only
    when one does not already exist (so previously recorded checks survive).
    """
    result = _generate_files_data(path_to_hash, path_to_size, revision_number, release_policy, uploader_uid, previous)
    file_path = attestable_path(project_name, version_name, revision_number)
    await util.atomic_write_file(file_path, result.model_dump_json(indent=2))
    paths_result = models.AttestablePathsV1(paths=result.paths)
    paths_file_path = attestable_paths_path(project_name, version_name, revision_number)
    await util.atomic_write_file(paths_file_path, paths_result.model_dump_json(indent=2))
    checks_file_path = attestable_checks_path(project_name, version_name, revision_number)
    # Use the async existence check (plain Path.exists() would block the event
    # loop) and the same atomic write helper as the files above, so a crash
    # mid-write cannot leave a truncated checks file behind
    if not await aiofiles.os.path.isfile(checks_file_path):
        await util.atomic_write_file(checks_file_path, models.AttestableChecksV1().model_dump_json(indent=2))
179+
180+
async def write_checks_data(
    project_name: str,
    version_name: str,
    revision_number: str,
    checks: list[int],
) -> None:
    """Merge *checks* into the revision's checks file, keeping a sorted union.

    The existing file content is read-modified-written atomically via
    util.atomic_modify_file; an unparsable or empty existing file is treated
    as having no recorded checks rather than raising.
    """
    log.info(f"Writing checks for {project_name}/{version_name}/{revision_number}: {checks}")

    def modify(content: str) -> str:
        # Consistently use the models-qualified name, matching the rest of
        # this module (the bare AttestableChecksV1 alias was used before)
        try:
            current = models.AttestableChecksV1.model_validate_json(content).checks
        except pydantic.ValidationError:
            current = []
        merged = set(current)
        merged.update(checks)
        result = models.AttestableChecksV1(checks=sorted(merged))
        return result.model_dump_json(indent=2)

    await util.atomic_modify_file(attestable_checks_path(project_name, version_name, revision_number), modify)
163200
164201
165202def _compute_hashes_with_attribution (
@@ -197,10 +234,11 @@ def _compute_hashes_with_attribution(
197234 return new_hashes
198235
199236
200- def _generate (
237+ def _generate_files_data (
201238 path_to_hash : dict [str , str ],
202239 path_to_size : dict [str , int ],
203240 revision_number : str ,
241+ release_policy : dict [str , Any ] | None ,
204242 uploader_uid : str ,
205243 previous : models .AttestableV1 | None ,
206244) -> models .AttestableV1 :
@@ -215,4 +253,5 @@ def _generate(
215253 return models .AttestableV1 (
216254 paths = dict (path_to_hash ),
217255 hashes = dict (new_hashes ),
256+ policy = release_policy or {},
218257 )
0 commit comments