diff --git a/ta_storage/pg.py b/ta_storage/pg.py
index bb164dde4..c829a3adf 100644
--- a/ta_storage/pg.py
+++ b/ta_storage/pg.py
@@ -222,7 +222,7 @@ def save_tests(db_session: Session, tests_to_write: dict[str, dict[str, Any]]):
 
     test_insert = insert(Test.__table__).values(test_data)
     insert_on_conflict_do_update = test_insert.on_conflict_do_update(
-        index_elements=["repoid", "name", "testsuite", "flags_hash"],
+        index_elements=["id"],
         set_={
             "framework": test_insert.excluded.framework,
             "computed_name": test_insert.excluded.computed_name,
diff --git a/tasks/ta_finisher.py b/tasks/ta_finisher.py
index 5d2d21ff2..32ae3a463 100644
--- a/tasks/ta_finisher.py
+++ b/tasks/ta_finisher.py
@@ -24,6 +24,7 @@
 from helpers.string import EscapeEnum, Replacement, StringEscaper, shorten_file_paths
 from services.activation import activate_user
 from services.lock_manager import LockManager, LockRetry, LockType
+from services.redis import get_redis_connection
 from services.repository import (
     EnrichedPull,
     TorngitBaseAdapter,
@@ -83,8 +84,11 @@ def queue_optional_tasks(
     commit_yaml: UserYaml,
     branch: str | None,
 ):
+    redis_client = get_redis_connection()
+
     if should_do_flaky_detection(repo, commit_yaml):
         if commit.merged is True or branch == repo.branch:
+            redis_client.set(f"flake_uploads:{repo.repoid}", 0)
             process_flakes_task_sig = process_flakes_task.s(
                 repo_id=repo.repoid,
                 commit_id=commit.commitid,
diff --git a/tasks/test_results_processor.py b/tasks/test_results_processor.py
index 2a5e7a904..bfaadb128 100644
--- a/tasks/test_results_processor.py
+++ b/tasks/test_results_processor.py
@@ -1,8 +1,6 @@
 from __future__ import annotations
 
-import base64
 import logging
-import zlib
 from dataclasses import dataclass
 from datetime import date, datetime
 from typing import Literal, TypedDict
@@ -380,10 +378,6 @@ def save_test_flag_bridges(
         db_session.execute(insert_on_conflict_do_nothing_flags)
         db_session.commit()
 
-    def decode_raw_file(self, file: bytes) -> bytes:
-        file_bytes = zlib.decompress(base64.b64decode(file))
-        return file_bytes
-
     def parse_file(
         self,
         file_bytes: bytes,
@@ -438,8 +432,6 @@ def process_individual_upload(
 
         parsing_results, readable_files = result
 
-        upload.state = "processed"
-
         if all(len(result["testruns"]) == 0 for result in parsing_results):
             successful = False
             log.error(
@@ -460,6 +452,7 @@
             upload.flag_names,
         )
 
+        upload.state = "processed"
         db_session.commit()
 
         log.info(
@@ -469,24 +462,14 @@
         if should_delete_archive:
             self.delete_archive(archive_service, upload)
         else:
+            log.info(
+                "Writing readable files to archive",
+                extra=dict(upload_id=upload_id, readable_files=readable_files),
+            )
             archive_service.write_file(upload.storage_path, readable_files)
 
         return {"successful": successful}
 
-    def rewrite_readable(
-        self, network: list[str] | None, report_contents: list[ReadableFile]
-    ) -> bytes:
-        buffer = b""
-        if network is not None:
-            for file in network:
-                buffer += f"{file}\n".encode("utf-8")
-            buffer += b"<<<<<< network\n\n"
-        for report_content in report_contents:
-            buffer += f"# path={report_content.path}\n".encode("utf-8")
-            buffer += report_content.contents
-            buffer += b"\n<<<<<< EOF\n\n"
-        return buffer
-
     def should_delete_archive(self, commit_yaml):
         if get_config("services", "minio", "expire_raw_after_n_days"):
             return True
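
A note on the ta_storage/pg.py hunk: moving the upsert's conflict target from the composite unique constraint ("repoid", "name", "testsuite", "flags_hash") to the primary key "id" assumes test ids are now computed deterministically before insert, so repeated uploads of the same test collide on "id". Below is a minimal sketch of that SQLAlchemy upsert pattern; the table, columns, and helper function are hypothetical stand-ins, not the real Test model.

# Hedged sketch (not code from this PR): PostgreSQL INSERT ... ON CONFLICT
# DO UPDATE via SQLAlchemy, keyed on the primary key "id".
from sqlalchemy import Column, MetaData, String, Table
from sqlalchemy.dialects.postgresql import insert

metadata = MetaData()
# Hypothetical stand-in for the real Test model.
tests = Table(
    "tests",
    metadata,
    Column("id", String, primary_key=True),  # deterministic id computed by the caller
    Column("framework", String),
    Column("computed_name", String),
)

def upsert_tests(engine, rows: list[dict]) -> None:
    stmt = insert(tests).values(rows)
    stmt = stmt.on_conflict_do_update(
        index_elements=["id"],  # conflict target: the primary key
        set_={
            # EXCLUDED is the row that failed to insert; take its new values.
            "framework": stmt.excluded.framework,
            "computed_name": stmt.excluded.computed_name,
        },
    )
    with engine.begin() as conn:
        conn.execute(stmt)

The tasks/ta_finisher.py hunk initializes a per-repo Redis counter (flake_uploads:{repoid}) to 0 before queueing process_flakes; how that key is incremented or consumed is presumably handled elsewhere and is outside this diff.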