Skip to content

Commit 8861406

Browse files
authored
Generate RSA keys job (#223)
* rsa_keys: save before merge
* rsa_keys: make current tests work
* rsa_keys: add tests
* rsa_keys: use newer uv to have deps cooldown
* rsa_keys: refactor
1 parent f0ad11b commit 8861406

File tree

26 files changed

+289
-32
lines changed

26 files changed

+289
-32
lines changed

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
FROM ghcr.io/astral-sh/uv:bookworm-slim as builder
1+
FROM ghcr.io/astral-sh/uv:trixie-slim as builder
22

33
WORKDIR /app
44
COPY pyproject.toml uv.lock /app/

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ uv run pytest
4848
### Build docker image
4949

5050
```
51-
docker build --tag job_executor .
51+
docker buildx build --tag job-executor:test-local .
5252
```
5353

5454
## Built with

job_executor/adapter/datastore_api/__init__.py

Lines changed: 22 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,10 +16,11 @@
1616
Operation,
1717
)
1818
from job_executor.common.exceptions import HttpRequestError, HttpResponseError
19-
from job_executor.config import environment
19+
from job_executor.config import environment, secrets
2020

2121
DATASTORE_API_URL = environment.datastore_api_url
2222
DEFAULT_REQUESTS_TIMEOUT = (10, 60)  # (connect timeout, read timeout) — NOTE(review): requests interprets the tuple as (connect, read); the original comment had the order reversed
23+
DATASTORE_API_SERVICE_KEY = secrets.datastore_api_service_key
2324

2425
logger = logging.getLogger()
2526

@@ -117,6 +118,25 @@ def get_datastore_directory(rdn: str) -> Path:
117118
return Path(DatastoreResponse.model_validate(response.json()).directory)
118119

119120

121+
def post_public_key(datastore_rdn: str, public_key_pem: bytes) -> None:
122+
"""
123+
Post the public RSA key to the datastore-api.
124+
125+
:param datastore_rdn: The RDN of the datastore
126+
:param public_key_pem: The public key in PEM format as bytes
127+
"""
128+
request_url = f"{DATASTORE_API_URL}/datastores/{datastore_rdn}/public-key"
129+
execute_request(
130+
"POST",
131+
request_url,
132+
data=public_key_pem,
133+
headers={
134+
"Content-Type": "application/x-pem-file",
135+
"X-API-Key": DATASTORE_API_SERVICE_KEY,
136+
},
137+
)
138+
139+
120140
def query_for_jobs() -> JobQueryResult:
121141
"""
122142
Retrieves different types of jobs based on the system's state
@@ -147,6 +167,7 @@ def query_for_jobs() -> JobQueryResult:
147167
Operation.REMOVE,
148168
Operation.ROLLBACK_REMOVE,
149169
Operation.DELETE_ARCHIVE,
170+
Operation.GENERATE_RSA_KEYS,
150171
],
151172
),
152173
queued_worker_jobs=get_jobs(

job_executor/adapter/datastore_api/models.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,7 @@ class Operation(StrEnum):
5050
REMOVE = "REMOVE"
5151
ROLLBACK_REMOVE = "ROLLBACK_REMOVE"
5252
DELETE_ARCHIVE = "DELETE_ARCHIVE"
53+
GENERATE_RSA_KEYS = "GENERATE_RSA_KEYS"
5354

5455

5556
class ReleaseStatus(StrEnum):

job_executor/adapter/fs/__init__.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,9 @@
44

55
from job_executor.adapter.fs.datastore_files import DatastoreDirectory
66
from job_executor.adapter.fs.input_files import InputDirectory
7+
from job_executor.adapter.fs.private_keys_directory import PrivateKeysDirectory
78
from job_executor.adapter.fs.working_files import WorkingDirectory
9+
from job_executor.config import environment
810

911

1012
class FileSystemAdapter(Protocol):
@@ -21,13 +23,17 @@ class LocalStorageAdapter:
2123
datastore_dir: DatastoreDirectory
2224
working_dir: WorkingDirectory
2325
input_dir: InputDirectory
26+
private_keys_dir: PrivateKeysDirectory
2427

25-
def __init__(self, datastore_dir_path: Path) -> None:
28+
def __init__(self, datastore_dir_path: Path, datastore_rdn: str) -> None:
2629
self.datastore_dir = DatastoreDirectory(datastore_dir_path)
2730
self.working_dir = WorkingDirectory(
2831
Path(f"{datastore_dir_path}_working")
2932
)
3033
self.input_dir = InputDirectory(Path(f"{datastore_dir_path}_input"))
34+
self.private_keys_dir = PrivateKeysDirectory(
35+
Path(environment.private_keys_dir) / datastore_rdn
36+
)
3137

3238
def move_working_dir_parquet_to_datastore(self, dataset_name: str) -> None:
3339
"""

job_executor/adapter/fs/datastore_files.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919

2020
class DatastoreDirectory:
2121
root_dir: Path
22-
vault_dir: Path
2322
data_dir: Path
2423
metadata_dir: Path
2524
draft_metadata_all_path: Path
@@ -31,7 +30,6 @@ def __init__(self, root_dir: Path) -> None:
3130
self.root_dir = root_dir
3231
self.data_dir = root_dir / "data"
3332
self.metadata_dir = root_dir / "datastore"
34-
self.vault_dir = root_dir / "vault"
3533
self.draft_version_path = self.metadata_dir / "draft_version.json"
3634
self.archive_dir = self.root_dir / "archive"
3735
self.draft_metadata_all_path = (
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
import os
2+
from dataclasses import dataclass
3+
from pathlib import Path
4+
5+
6+
@dataclass
7+
class PrivateKeysDirectory:
8+
path_with_rdn: Path
9+
10+
def create(self) -> bool:
11+
if not self.path_with_rdn.exists():
12+
os.makedirs(self.path_with_rdn)
13+
return True
14+
return False
15+
16+
def save_private_key(self, microdata_private_key_pem: bytes) -> None:
17+
with open(self._get_private_key_location(), "wb") as file:
18+
file.write(microdata_private_key_pem)
19+
20+
def clean_up(self) -> bool:
21+
if self._get_private_key_location().exists():
22+
os.remove(self._get_private_key_location())
23+
return True
24+
return False
25+
26+
def _get_private_key_location(self) -> Path:
27+
return self.path_with_rdn / "microdata_private_key.pem"

job_executor/app.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def initialize_app() -> Manager:
2424
rollback.fix_interrupted_jobs()
2525
for rdn in datastore_api.get_datastores():
2626
local_storage = LocalStorageAdapter(
27-
datastore_api.get_datastore_directory(rdn)
27+
datastore_api.get_datastore_directory(rdn), rdn
2828
)
2929
if local_storage.datastore_dir.temporary_backup_exists():
3030
raise StartupException(f"tmp directory exists for {rdn}")

job_executor/config/__init__.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ class Environment:
1414
docker_host_name: str
1515
commit_id: str
1616
max_gb_all_workers: int
17+
private_keys_dir: str
1718

1819

1920
def _initialize_environment() -> Environment:
@@ -27,6 +28,7 @@ def _initialize_environment() -> Environment:
2728
docker_host_name=os.environ["DOCKER_HOST_NAME"],
2829
commit_id=os.environ["COMMIT_ID"],
2930
max_gb_all_workers=int(os.environ["MAX_GB_ALL_WORKERS"]),
31+
private_keys_dir=os.environ["PRIVATE_KEYS_DIR"],
3032
)
3133

3234

@@ -36,13 +38,15 @@ def _initialize_environment() -> Environment:
3638
@dataclass
3739
class Secrets:
3840
pseudonym_service_api_key: str
41+
datastore_api_service_key: str
3942

4043

4144
def _initialize_secrets() -> Secrets:
4245
with open(environment.secrets_file, encoding="utf-8") as f:
4346
secrets_file = json.load(f)
4447
return Secrets(
45-
pseudonym_service_api_key=secrets_file["PSEUDONYM_SERVICE_API_KEY"]
48+
pseudonym_service_api_key=secrets_file["PSEUDONYM_SERVICE_API_KEY"],
49+
datastore_api_service_key=secrets_file["DATASTORE_API_SERVICE_KEY"],
4650
)
4751

4852

0 commit comments

Comments
 (0)