Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
165 changes: 142 additions & 23 deletions tests/end2end/conftest.py
Original file line number Diff line number Diff line change
@@ -1,33 +1,152 @@
import os
import sys
import tempfile
import time

# Ensure this directory is on sys.path so test files can import constants.py
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

import boto3
import os
import pytest
import requests
from boto3.exceptions import S3UploadFailedError
from botocore.exceptions import EndpointConnectionError

from constants import BUCKETS


# ---------------------------------------------------------------------------
# Internal helpers (not fixtures)
# ---------------------------------------------------------------------------

def _delete_bucket(client, bucket: str) -> None:
"""Delete a bucket and all its objects."""
while True:
objects = client.list_objects(Bucket=bucket)
content = objects.get('Contents', [])
if not content:
break
for obj in content:
client.delete_object(Bucket=bucket, Key=obj['Key'])
client.delete_bucket(Bucket=bucket)


def _s3_healthcheck(client) -> None:
    """Block until the S3 backend answers a full create/upload/delete cycle.

    Tries up to 10 times, sleeping linearly longer (1s, 2s, ...) after each
    failed attempt. The scratch file is always removed on exit.

    Raises:
        RuntimeError: if the backend never becomes healthy after 10 attempts.
    """
    bucket = 'artifacts-healthcheck'
    # mkstemp() instead of the deprecated, race-prone tempfile.mktemp():
    # it creates the file atomically and hands back an open descriptor.
    fd, filename = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'wb') as fh:
            fh.write(os.urandom(1024))
        for attempt in range(10):
            try:
                client.create_bucket(Bucket=bucket)
                # The temp file's own path doubles as the object key.
                client.upload_file(filename, bucket, filename)
                _delete_bucket(client, bucket)
                return
            except (S3UploadFailedError, EndpointConnectionError):
                # Linear backoff before the next attempt.
                time.sleep(attempt + 1)
        raise RuntimeError("S3 backend never became healthy")
    finally:
        os.remove(filename)

# ---------------------------------------------------------------------------
# Session-scoped fixtures (created once per pytest run)
# ---------------------------------------------------------------------------

@pytest.fixture(scope="session")
def s3_client():
    """Boto3 S3 client pointed at the test cloudserver backend.

    Credentials and endpoint come from AWS_ACCESS_KEY_ID,
    AWS_SECRET_ACCESS_KEY and ENDPOINT_URL, with test-environment defaults.
    Blocks until the backend passes a create/upload/delete round trip
    (see ``_s3_healthcheck``), so dependent tests never race the backend.
    """
    session = boto3.session.Session()
    client = session.client(
        service_name='s3',
        aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID', 'accessKey1'),
        aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY', 'verySecretKey1'),
        endpoint_url=os.getenv('ENDPOINT_URL', 'http://cloudserver-front:8000'),
    )
    _s3_healthcheck(client)
    return client


@pytest.fixture(scope="session")
def artifacts_url() -> str:
    """Base URL of the artifacts service under test."""
    return os.environ.get('ARTIFACTS_URL', 'http://artifacts')


# ---------------------------------------------------------------------------
# Function-scoped fixtures (created fresh for each test)
# ---------------------------------------------------------------------------

@pytest.fixture(autouse=True)
def managed_buckets(s3_client):
    """Provision the artifact buckets for each test, then tear them down."""
    for bucket_name in BUCKETS:
        s3_client.create_bucket(Bucket=bucket_name)
    yield  # the test body runs here
    for bucket_name in BUCKETS:
        _delete_bucket(s3_client, bucket_name)


@pytest.fixture
def session():
    """Requests session authenticated as a user with full upload rights."""
    authed = requests.Session()
    authed.auth = ('username-pass', 'fake-password')
    return authed


@pytest.fixture
def restricted_session():
    """Requests session authenticated as a read-only user (no upload/copy)."""
    readonly = requests.Session()
    readonly.auth = ('username-pass-no-restricted-paths', 'fake-password')
    return readonly


@pytest.fixture
def bot_session():
    """Requests session using the local bot credentials."""
    bot = requests.Session()
    bot.auth = ('botuser', 'botpass')
    return bot


@pytest.fixture
def anon_session():
    """Requests session that carries no credentials at all."""
    anonymous = requests.Session()
    return anonymous


# ---------------------------------------------------------------------------
# Factory fixtures for common setup operations
# ---------------------------------------------------------------------------

@pytest.fixture
def upload_file(session, artifacts_url):
    """Factory fixture: PUT arbitrary bytes to ``/upload/<build>/<path>``."""
    def _upload(build: str, path: str, data: bytes = b'test content') -> requests.Response:
        resp = session.put(f'{artifacts_url}/upload/{build}/{path}', data=data)
        # Fail fast inside the factory so callers don't have to re-check.
        assert resp.status_code == 200, f'upload {path}: {resp.status_code} {resp.text}'
        return resp
    return _upload


@pytest.fixture
def finish_build(session, artifacts_url):
    """Factory: mark a build finished by uploading ``.final_status``.

    After the upload, a ``ForceCacheUpdate`` GET is issued against the same
    object so the nginx proxy cache is refreshed and later ``/copy/`` or
    ``/last_success/`` requests observe the new status.
    """
    def _finish(build: str, status: str = 'SUCCESSFUL') -> None:
        upload_url = f'{artifacts_url}/upload/{build}/.final_status'
        response = session.put(upload_url, data=status.encode())
        assert response.status_code == 200
        # Refresh the cached copy of the object we just wrote.
        download_url = f'{artifacts_url}/download/{build}/.final_status'
        session.get(download_url, headers={'ForceCacheUpdate': 'yes'})
    return _finish
8 changes: 8 additions & 0 deletions tests/end2end/constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
"""Shared constants for the artifacts end-to-end test suite."""

BUCKETS = ('artifacts-staging', 'artifacts-promoted', 'artifacts-prolonged')

# Build name examples that exercise each routing bucket.
STAGING_BUILD = 'githost:owner:repo:staging-8e50acc6a1.pre-merge.28.1'
PROMOTED_BUILD = 'githost:owner:repo:promoted-8e50acc6a1.rel.1'
PROLONGED_BUILD = 'githost:owner:repo:1.0.28.1'
90 changes: 90 additions & 0 deletions tests/end2end/test_auth.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
"""Tests for GitHub-based access control and local bot credentials."""

import pytest

from constants import STAGING_BUILD


# ---------------------------------------------------------------------------
# Authenticated users with full upload rights
# ---------------------------------------------------------------------------

def test_authenticated_user_can_upload(session, artifacts_url):
    """A fully-privileged user can PUT a file under /upload/."""
    url = f'{artifacts_url}/upload/{STAGING_BUILD}/.final_status'
    ingress_headers = {'Script-Name': '/foo'}
    resp = session.put(url, data=b'SUCCESSFUL', headers=ingress_headers)
    assert resp.status_code == 200


# ---------------------------------------------------------------------------
# Local bot credentials
# ---------------------------------------------------------------------------

def test_bot_credentials_can_download(bot_session, artifacts_url):
    """Bot user (local creds) can hit /download/ — 404 is fine, not 401/403."""
    resp = bot_session.get(f'{artifacts_url}/download/{STAGING_BUILD}')
    # The build doesn't exist, so 404 is expected. What matters is that the
    # bot credentials are accepted (no 401) and authorized (no 403).
    assert resp.status_code != 401
    assert resp.status_code != 403


# ---------------------------------------------------------------------------
# Restricted users (can read, cannot write)
# ---------------------------------------------------------------------------

def test_restricted_user_can_download(restricted_session, artifacts_url):
    """User without upload permission can still browse /download/."""
    listing_url = f'{artifacts_url}/download/{STAGING_BUILD}/'
    resp = restricted_session.get(listing_url)
    # The build was never created, so a plain 404 (rather than 401/403)
    # proves the read path is open to this user.
    assert resp.status_code == 404


def test_restricted_user_cannot_upload(restricted_session, artifacts_url):
    """Write access is denied for the read-only user."""
    upload_url = f'{artifacts_url}/upload/{STAGING_BUILD}/.final_status'
    resp = restricted_session.put(upload_url, data=b'SUCCESSFUL')
    assert resp.status_code == 403


def test_restricted_user_cannot_copy(restricted_session, artifacts_url):
    """The /copy/ endpoint is forbidden for the read-only user."""
    target = f'copy_of_{STAGING_BUILD}'
    copy_url = f'{artifacts_url}/copy/{STAGING_BUILD}/{target}/'
    assert restricted_session.get(copy_url).status_code == 403


def test_restricted_user_cannot_add_metadata(restricted_session, artifacts_url):
    """The /add_metadata/ endpoint is forbidden for the read-only user."""
    resp = restricted_session.get(f'{artifacts_url}/add_metadata/fake/args')
    assert resp.status_code == 403


# ---------------------------------------------------------------------------
# Failing / missing credentials
# ---------------------------------------------------------------------------

def test_failing_github_user_is_forbidden(artifacts_url):
    """A user rejected by the (fake) GitHub API receives 403."""
    # Plain import instead of the original __import__('requests') hack;
    # local because this module otherwise has no direct requests dependency.
    import requests
    s = requests.Session()
    s.auth = ('username-fail', 'fake-password')
    resp = s.put(
        f'{artifacts_url}/upload/{STAGING_BUILD}/.final_status',
        data=b'SUCCESSFUL',
        headers={'Script-Name': '/foo'},
    )
    assert resp.status_code == 403


def test_unauthenticated_request_is_unauthorized(anon_session, artifacts_url):
    """No Authorization header → 401."""
    upload_url = f'{artifacts_url}/upload/{STAGING_BUILD}/.final_status'
    resp = anon_session.put(
        upload_url,
        headers={'Script-Name': '/foo'},
        data=b'SUCCESSFUL',
    )
    assert resp.status_code == 401
92 changes: 92 additions & 0 deletions tests/end2end/test_copy.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
"""Tests for the build copy (/copy/) endpoint."""

import pytest

from constants import STAGING_BUILD


# Target build name used as the copy destination throughout this module.
COPY_BUILD = f'copy_of_{STAGING_BUILD}'


def test_copy_aborted_when_no_final_status(session, artifacts_url, upload_file):
    """Copy is refused when the source has no .final_status file."""
    # Populate the source build with a few objects but never finish it.
    for idx in range(5):
        upload_file(STAGING_BUILD, f'obj-{idx}', b'content')

    resp = session.get(f'{artifacts_url}/copy/{STAGING_BUILD}/{COPY_BUILD}/')
    assert resp.status_code == 200
    # The endpoint streams progress; the verdict is on the final line.
    assert resp.content.splitlines()[-1] == (
        b'SOURCE BUILD NOT FINISHED (NO ".final_status" FOUND), ABORTING'
    )


def test_copy_succeeds_after_final_status(
    session, artifacts_url, upload_file, finish_build
):
    """Copy succeeds once .final_status is present."""
    for idx in range(5):
        upload_file(STAGING_BUILD, f'obj-{idx}', b'content')
    finish_build(STAGING_BUILD)

    copy_url = f'{artifacts_url}/copy/{STAGING_BUILD}/{COPY_BUILD}/'
    resp = session.get(copy_url)
    assert resp.status_code == 200
    # The endpoint streams progress; the verdict is on the final line.
    assert resp.content.splitlines()[-1] == b'BUILD COPIED'


def test_copy_source_and_target_listings_are_identical(
    session, artifacts_url, upload_file, finish_build
):
    """After copy, source and target flat listings are byte-for-byte equal."""
    object_count = 1024  # > one listing page, to exercise pagination
    for idx in range(object_count):
        upload_file(STAGING_BUILD, f'obj-{idx}', b'x')
    finish_build(STAGING_BUILD)

    session.get(f'{artifacts_url}/copy/{STAGING_BUILD}/{COPY_BUILD}/')

    source = session.get(f'{artifacts_url}/download/{STAGING_BUILD}/?format=txt')
    target = session.get(f'{artifacts_url}/download/{COPY_BUILD}/?format=txt')
    assert source.status_code == 200
    assert target.status_code == 200
    # uploaded objects + .final_status + .original_build
    assert len(source.content.splitlines()) == object_count + 2
    assert source.content == target.content


def test_copy_fails_when_target_already_exists(
    session, artifacts_url, upload_file, finish_build
):
    """A second copy to the same target is rejected with FAILED."""
    upload_file(STAGING_BUILD, 'file.txt', b'data')
    finish_build(STAGING_BUILD)

    copy_url = f'{artifacts_url}/copy/{STAGING_BUILD}/{COPY_BUILD}/'
    session.get(copy_url)  # first copy populates the target

    # Second attempt must fail because the target is no longer empty.
    resp = session.get(copy_url)
    assert resp.status_code == 200
    output = resp.content.splitlines()
    check_line = (
        b"Checking if the target reference '%b' is empty" % COPY_BUILD.encode()
    )
    assert output[-2] == check_line
    assert output[-1] == b'FAILED'


def test_copy_behind_ingress(session, artifacts_url, upload_file, finish_build):
    """Copy works correctly when a Script-Name ingress header is present."""
    upload_file(STAGING_BUILD, '.final_status', b'SUCCESSFUL')
    # finish_build re-uploads .final_status and flushes the proxy cache.
    finish_build(STAGING_BUILD)

    ingress_headers = {'Script-Name': '/foo'}
    resp = session.get(
        f'{artifacts_url}/copy/{STAGING_BUILD}/{COPY_BUILD}/',
        headers=ingress_headers,
    )
    assert resp.status_code == 200
    assert resp.content.splitlines()[-1] == b'BUILD COPIED'

    # Downloading via the ingress path should work as well.
    dl = session.get(
        f'{artifacts_url}/download/{STAGING_BUILD}/.final_status',
        headers={'Script-Name': '/foo', 'ForceCacheUpdate': 'yes'},
    )
    assert dl.status_code == 200
Loading
Loading