|
1 | 1 | from __future__ import annotations |
2 | 2 |
|
3 | 3 | import json |
| 4 | +import os |
| 5 | +import tempfile |
| 6 | + |
| 7 | +import pytest |
| 8 | +from pytest import raises |
4 | 9 |
|
5 | 10 | from diracx import cli |
| 11 | +from diracx.core.models import ScalarSearchSpec |
| 12 | +from diracx.core.preferences import get_diracx_preferences |
6 | 13 |
|
# Minimal JDL (Job Description Language) job description used by the
# submission tests below; submitted verbatim through the CLI.
TEST_JDL = """
    Arguments = "jobDescription.xml -o LogLevel=INFO";
    Executable = "dirac-jobexec";
    JobGroup = jobGroup;
    JobName = jobName;
    JobType = User;
    LogLevel = INFO;
    OutputSandbox =
        {
            Script1_CodeOutput.log,
            std.err,
            std.out
        };
    Priority = 1;
    Site = ANY;
    StdError = std.err;
    StdOutput = std.out;
"""
| 32 | + |
| 33 | + |
@pytest.fixture
async def jdl_file():
    """Yield the path of a temporary file containing ``TEST_JDL``.

    The file lives for the duration of the test and is deleted
    automatically when the context manager exits at teardown.
    """
    with tempfile.NamedTemporaryFile(mode="w", encoding="utf-8") as jdl:
        jdl.write(TEST_JDL)
        # Flush so readers opening the path by name see the full content.
        jdl.flush()
        yield jdl.name
| 40 | + |
| 41 | + |
async def test_submit(with_cli_login, jdl_file, capfd):
    """Test submitting a job using a JDL file.

    Submits a single job through the CLI and checks that the success
    message is printed and nothing is written to stderr.
    """
    # The fixture writes the JDL as UTF-8, so read it back explicitly as
    # UTF-8 rather than relying on the platform default encoding.
    with open(jdl_file, "r", encoding="utf-8") as temp_file:
        await cli.jobs.submit([temp_file])

    cap = capfd.readouterr()
    assert cap.err == ""
    assert "Inserted 1 jobs with ids" in cap.out
| 51 | + |
| 52 | + |
async def test_search(with_cli_login, jdl_file, capfd):
    """Test searching for jobs in both JSON and RICH output formats."""

    # Submit 20 jobs so pagination has something to paginate over.
    # The fixture writes the JDL as UTF-8; read it back explicitly.
    with open(jdl_file, "r", encoding="utf-8") as temp_file:
        await cli.jobs.submit([temp_file] * 20)

    cap = capfd.readouterr()

    # By default the output should be in JSON format as capfd is not a TTY
    await cli.jobs.search()
    cap = capfd.readouterr()
    assert cap.err == ""
    jobs = json.loads(cap.out)

    # There should be 10 jobs by default (one page)
    assert len(jobs) == 10
    assert "JobID" in jobs[0]
    assert "JobGroup" in jobs[0]

    # Request a very large page size so every job is returned at once
    await cli.jobs.search(per_page=9999)
    cap = capfd.readouterr()
    assert cap.err == ""
    jobs = json.loads(cap.out)

    # At least the 20 jobs submitted above should now be present
    assert len(jobs) >= 20
    assert "JobID" in cap.out
    assert "JobGroup" in cap.out

    # Search for a job that doesn't exist
    condition = ScalarSearchSpec(parameter="Status", operator="eq", value="nonexistent")
    await cli.jobs.search(condition=[condition])
    cap = capfd.readouterr()
    assert cap.err == ""
    assert "[]" == cap.out.strip()

    # Switch to RICH output. Remember the previous value and restore it in
    # the finally block so the setting does not leak into other tests.
    original_format = os.environ.get("DIRACX_OUTPUT_FORMAT")
    get_diracx_preferences.cache_clear()
    os.environ["DIRACX_OUTPUT_FORMAT"] = "RICH"
    try:
        await cli.jobs.search()
        cap = capfd.readouterr()
        assert cap.err == ""

        # RICH output is a rendered table, not JSON
        with raises(json.JSONDecodeError):
            json.loads(cap.out)

        assert "JobID" in cap.out
        assert "JobGroup" in cap.out
        assert "Showing 0-9 of " in cap.out

        # Change per-page to a very large number to get all the jobs at once: the caption should change
        await cli.jobs.search(per_page=9999)
        cap = capfd.readouterr()
        assert cap.err == ""

        with raises(json.JSONDecodeError):
            json.loads(cap.out)

        assert "JobID" in cap.out
        assert "JobGroup" in cap.out
        assert "Showing all jobs" in cap.out

        # Search for a job that doesn't exist
        await cli.jobs.search(condition=[condition])
        cap = capfd.readouterr()
        assert cap.err == ""
        assert "No jobs found" in cap.out
    finally:
        # Undo the environment change and drop the cached preferences so
        # subsequent tests see the default output format again.
        if original_format is None:
            os.environ.pop("DIRACX_OUTPUT_FORMAT", None)
        else:
            os.environ["DIRACX_OUTPUT_FORMAT"] = original_format
        get_diracx_preferences.cache_clear()
0 commit comments