Skip to content

Commit e236a61

Browse files
authored
Merge pull request ClickHouse#79089 from ClickHouse/ci_more_debugging_logs
CI: Add debug logs
2 parents 7106c93 + eb27b7e commit e236a61

File tree

5 files changed

+51
-49
lines changed

5 files changed

+51
-49
lines changed

ci/praktika/hook_html.py

Lines changed: 19 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -142,9 +142,11 @@ def push_pending_ci_report(cls, _workflow):
142142
# fetch running status with start_time for current job
143143
result = Result.from_fs(job.name)
144144
else:
145-
result = Result.generate_pending(job.name)
145+
result = Result.create_new(job.name, Result.Status.PENDING)
146146
results.append(result)
147-
summary_result = Result.generate_pending(_workflow.name, results=results)
147+
summary_result = Result.create_new(
148+
_workflow.name, Result.Status.RUNNING, results=results
149+
)
148150
summary_result.start_time = Utils.timestamp()
149151
summary_result.links.append(env.CHANGE_URL)
150152
summary_result.links.append(env.RUN_URL)
@@ -200,18 +202,26 @@ def configure(cls, _workflow):
200202
sha=cache_record.sha,
201203
job_name=skipped_job,
202204
)
203-
result = Result.generate_skipped(
204-
skipped_job, [report_link], "reused from cache"
205+
result = Result.create_new(
206+
skipped_job,
207+
Result.Status.SKIPPED,
208+
[report_link],
209+
"reused from cache",
205210
)
206211
else:
207-
result = Result.generate_skipped(
208-
skipped_job, info=filtered_job_and_reason[skipped_job]
212+
result = Result.create_new(
213+
skipped_job,
214+
Result.Status.SKIPPED,
215+
info=filtered_job_and_reason[skipped_job],
209216
)
210217
results.append(result)
211218
if results:
212-
assert _ResultS3.update_workflow_results(
213-
_workflow.name, new_sub_results=results
214-
)
219+
assert (
220+
_ResultS3.update_workflow_results(
221+
_workflow.name, new_sub_results=results
222+
)
223+
is None
224+
), "Workflow status supposed to remain 'running'"
215225

216226
@classmethod
217227
def pre_run(cls, _workflow, _job):

ci/praktika/parser.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -236,10 +236,6 @@ def parse(self):
236236
assert (
237237
False
238238
), f"Artifact [{artifact_name}] has unsupported type [{artifact.type}]"
239-
if not artifact.required_by and artifact.type != Artifact.Type.PHONY:
240-
print(
241-
f"WARNING: Artifact [{artifact_name}] provided by job [{artifact.provided_by}] in workflow [{self.workflow_name}] has no job that requires it"
242-
)
243239
if artifact.type == Artifact.Type.GH:
244240
self.workflow_yaml_config.job_to_config[
245241
artifact.provided_by

ci/praktika/result.py

Lines changed: 19 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -344,23 +344,10 @@ def add_ext_key_value(self, key, value):
344344
return self
345345

346346
@classmethod
347-
def generate_pending(cls, name, results=None):
347+
def create_new(cls, name, status, links=None, info="", results=None):
348348
return Result(
349349
name=name,
350-
status=Result.Status.PENDING,
351-
start_time=None,
352-
duration=None,
353-
results=results or [],
354-
files=[],
355-
links=[],
356-
info="",
357-
)
358-
359-
@classmethod
360-
def generate_skipped(cls, name, links=None, info="", results=None):
361-
return Result(
362-
name=name,
363-
status=Result.Status.SKIPPED,
350+
status=status,
364351
start_time=None,
365352
duration=None,
366353
results=results or [],
@@ -511,10 +498,10 @@ def to_stdout_formatted(self, indent="", res=""):
511498

512499
class ResultInfo:
513500
SETUP_ENV_JOB_FAILED = (
514-
"Failed to set up job env, it's praktika bug or misconfiguration"
501+
"Failed to set up job env, it is praktika bug or misconfiguration"
515502
)
516503
PRE_JOB_FAILED = (
517-
"Failed to do a job pre-run step, it's praktika bug or misconfiguration"
504+
"Failed to do a job pre-run step, it is praktika bug or misconfiguration"
518505
)
519506
KILLED = "Job killed or terminated, no Result provided"
520507
NOT_FOUND_IMPOSSIBLE = (
@@ -556,20 +543,21 @@ def copy_result_from_s3_with_version(cls, local_path):
556543
env = _Environment.get()
557544
file_name = Path(local_path).name
558545
local_dir = Path(local_path).parent
559-
file_name_pattern = f"{file_name}_*"
560-
for file_path in local_dir.glob(file_name_pattern):
561-
file_path.unlink()
562-
s3_path = f"{Settings.HTML_S3_PATH}/{env.get_s3_prefix()}/"
563-
S3.copy_file_from_s3_matching_pattern(
564-
s3_path=s3_path, local_path=local_dir, include=file_name_pattern
546+
s3_path = f"{Settings.HTML_S3_PATH}/{env.get_s3_prefix()}"
547+
latest_result_file = Shell.get_output(
548+
f"aws s3 ls {s3_path}/{file_name}_ | awk '{{print $4}}' | sort -r | head -n 1",
549+
strict=True,
550+
verbose=True,
551+
)
552+
version = int(latest_result_file.split("_")[-1])
553+
S3.copy_file_from_s3(
554+
s3_path=f"{s3_path}/{latest_result_file}", local_path=local_dir
555+
)
556+
Shell.check(
557+
f"cp {local_dir}/{latest_result_file} {local_path}",
558+
strict=True,
559+
verbose=True,
565560
)
566-
result_files = []
567-
for file_path in local_dir.glob(file_name_pattern):
568-
result_files.append(file_path)
569-
assert result_files, "No result files found"
570-
result_files.sort()
571-
version = int(result_files[-1].name.split("_")[-1])
572-
Shell.check(f"cp {result_files[-1]} {local_path}", strict=True, verbose=True)
573561
return version
574562

575563
@classmethod
@@ -698,6 +686,7 @@ def update_workflow_results(
698686
# when multiple concurrent jobs attempt to update the workflow report
699687
time.sleep(random.uniform(0, 2))
700688

689+
print(f"Workflow status changed: [{prev_status}] -> [{new_status}]")
701690
if prev_status != new_status:
702691
return new_status
703692
else:

ci/praktika/runner.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ def generate_local_run_environment(workflow, job, pr=None, sha=None):
7373

7474
workflow_config.dump()
7575

76-
Result.generate_pending(job.name).dump()
76+
Result.create_from(name=job.name, status=Result.Status.PENDING).dump()
7777

7878
def _setup_env(self, _workflow, job):
7979
# source env file to write data into fs (workflow config json, workflow status json)
@@ -323,7 +323,7 @@ def _post_run(
323323
info=info,
324324
).dump()
325325
elif prerun_exit_code != 0:
326-
info = f"ERROR: {ResultInfo.PRE_JOB_FAILED}"
326+
info = ResultInfo.PRE_JOB_FAILED
327327
print(info)
328328
# set Result with error and logs
329329
Result(

ci/praktika/s3.py

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,8 @@ def add_uploaded(cls, file_path):
7272
file_zize = cls.get_size_bytes(file_path)
7373
usage.uploaded += file_zize
7474
if file_name in usage.uploaded_details:
75-
print(f"WARNING: Duplicated upload for filename [{file_name}]")
75+
if not file_name.startswith("result_"):
76+
print(f"WARNING: Duplicated upload for filename [{file_name}]")
7677
usage.uploaded_details[file_name] += file_zize
7778
else:
7879
usage.uploaded_details[file_name] = file_zize
@@ -109,7 +110,7 @@ def clean_s3_directory(cls, s3_path, include=""):
109110
cmd = f"aws s3 rm s3://{s3_path} --recursive"
110111
if include:
111112
cmd += f' --exclude "*" --include "{include}"'
112-
cls.run_command_with_retries(cmd, retries=1)
113+
cls.run_command_with_retries(cmd, retries=1, with_stderr=True)
113114
return
114115

115116
@classmethod
@@ -196,7 +197,11 @@ def put(
196197

197198
@classmethod
198199
def run_command_with_retries(
199-
cls, command, retries=Settings.MAX_RETRIES_S3, no_strict=False
200+
cls,
201+
command,
202+
retries=Settings.MAX_RETRIES_S3,
203+
no_strict=False,
204+
with_stderr=False,
200205
):
201206
i = 0
202207
res = False
@@ -224,6 +229,8 @@ def run_command_with_retries(
224229
print(
225230
f"ERROR: aws s3 cp failed, stdout/stderr err: [{stderr}], out [{stdout}]"
226231
)
232+
elif with_stderr and (stdout or stderr):
233+
print(f"stdout: {stdout}\nstderr: {stderr}")
227234
res = ret_code == 0
228235
if not res and not no_strict:
229236
raise RuntimeError(f"s3 command failed: [{stderr}]")
@@ -275,7 +282,7 @@ def copy_file_from_s3_matching_pattern(
275282
).is_dir(), f"Path [{local_path}] does not exist or not a directory"
276283
assert s3_path.endswith("/"), f"s3 path is invalid [{s3_path}]"
277284
cmd = f'aws s3 cp s3://{s3_path} {local_path} --exclude "{exclude}" --include "{include}" --recursive'
278-
res = cls.run_command_with_retries(cmd, no_strict=no_strict)
285+
res = cls.run_command_with_retries(cmd, no_strict=no_strict, with_stderr=True)
279286
if res:
280287
print(
281288
"TODO: support StorageUsage.add_downloaded with matching pattern download"

0 commit comments

Comments (0)