Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix test analytics #1016

Merged
merged 2 commits into from
Jan 20, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion ta_storage/pg.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,7 @@ def save_tests(db_session: Session, tests_to_write: dict[str, dict[str, Any]]):

test_insert = insert(Test.__table__).values(test_data)
insert_on_conflict_do_update = test_insert.on_conflict_do_update(
index_elements=["repoid", "name", "testsuite", "flags_hash"],
index_elements=["id"],
set_={
"framework": test_insert.excluded.framework,
"computed_name": test_insert.excluded.computed_name,
Expand Down
4 changes: 4 additions & 0 deletions tasks/ta_finisher.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from helpers.string import EscapeEnum, Replacement, StringEscaper, shorten_file_paths
from services.activation import activate_user
from services.lock_manager import LockManager, LockRetry, LockType
from services.redis import get_redis_connection
from services.repository import (
EnrichedPull,
TorngitBaseAdapter,
Expand Down Expand Up @@ -83,8 +84,11 @@ def queue_optional_tasks(
commit_yaml: UserYaml,
branch: str | None,
):
redis_client = get_redis_connection()

if should_do_flaky_detection(repo, commit_yaml):
if commit.merged is True or branch == repo.branch:
redis_client.set(f"flake_uploads:{repo.repoid}", 0)
process_flakes_task_sig = process_flakes_task.s(
repo_id=repo.repoid,
commit_id=commit.commitid,
Expand Down
27 changes: 5 additions & 22 deletions tasks/test_results_processor.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
from __future__ import annotations

import base64
import logging
import zlib
from dataclasses import dataclass
from datetime import date, datetime
from typing import Literal, TypedDict
Expand Down Expand Up @@ -380,10 +378,6 @@ def save_test_flag_bridges(
db_session.execute(insert_on_conflict_do_nothing_flags)
db_session.commit()

def decode_raw_file(self, file: bytes) -> bytes:
    """Decode a raw upload payload: base64-decode it, then zlib-decompress it."""
    compressed = base64.b64decode(file)
    return zlib.decompress(compressed)

def parse_file(
self,
file_bytes: bytes,
Expand Down Expand Up @@ -438,8 +432,6 @@ def process_individual_upload(

parsing_results, readable_files = result

upload.state = "processed"

if all(len(result["testruns"]) == 0 for result in parsing_results):
successful = False
log.error(
Expand All @@ -460,6 +452,7 @@ def process_individual_upload(
upload.flag_names,
)

upload.state = "processed"
db_session.commit()

log.info(
Expand All @@ -469,24 +462,14 @@ def process_individual_upload(
if should_delete_archive:
self.delete_archive(archive_service, upload)
else:
log.info(
"Writing readable files to archive",
extra=dict(upload_id=upload_id, readable_files=readable_files),
)
archive_service.write_file(upload.storage_path, readable_files)

return {"successful": successful}

def rewrite_readable(
self, network: list[str] | None, report_contents: list[ReadableFile]
) -> bytes:
buffer = b""
if network is not None:
for file in network:
buffer += f"{file}\n".encode("utf-8")
buffer += b"<<<<<< network\n\n"
for report_content in report_contents:
buffer += f"# path={report_content.path}\n".encode("utf-8")
buffer += report_content.contents
buffer += b"\n<<<<<< EOF\n\n"
return buffer

def should_delete_archive(self, commit_yaml):
if get_config("services", "minio", "expire_raw_after_n_days"):
return True
Expand Down
Loading