Commit b6a7d62

Authored Mar 21, 2025
Merge pull request #1096 from roboflow/fix/code-scanning/98
Address code-scanning/98
2 parents 522a0ee + c83698a commit b6a7d62

File tree: 9 files changed, +105 -42 lines
 

inference/core/interfaces/http/builder/routes.py (+20 -13)

@@ -2,7 +2,9 @@
 import logging
 import os
 import re
+from hashlib import sha256
 from pathlib import Path
+from typing import Any, Dict

 from fastapi import APIRouter, Body, Depends, Header, HTTPException, status
 from starlette.responses import HTMLResponse, JSONResponse, RedirectResponse, Response
@@ -112,12 +114,12 @@ async def get_all_workflows():
         stat_info = json_file.stat()
         try:
             with json_file.open("r", encoding="utf-8") as f:
-                config_contents = json.load(f)
+                config_contents: Dict[str, Any] = json.load(f)
         except json.JSONDecodeError as e:
             logger.error(f"Error decoding JSON from {json_file}: {e}")
             continue

-        data[json_file.stem] = {
+        data[config_contents.get("id", json_file.stem)] = {
             "createTime": int(stat_info.st_ctime),
             "updateTime": int(stat_info.st_mtime),
             "config": config_contents,
@@ -139,7 +141,8 @@ async def get_workflow(workflow_id: str):
     if not re.match(r"^[\w\-]+$", workflow_id):
         return JSONResponse({"error": "invalid id"}, status_code=HTTP_400_BAD_REQUEST)

-    file_path = workflow_local_dir / f"{workflow_id}.json"
+    workflow_hash = sha256(workflow_id.encode()).hexdigest()
+    file_path = workflow_local_dir / f"{workflow_hash}.json"
     if not file_path.exists():
         return JSONResponse({"error": "not found"}, status_code=HTTP_404_NOT_FOUND)

@@ -148,7 +151,7 @@ async def get_workflow(workflow_id: str):
         with file_path.open("r", encoding="utf-8") as f:
             config_contents = json.load(f)
     except json.JSONDecodeError as e:
-        logger.error(f"Error reading JSON from {file_path}: {e}")
+        logger.error(f"Error reading JSON for {workflow_id} from '{file_path}': {e}")
         return JSONResponse({"error": "invalid JSON"}, status_code=500)

     return Response(
@@ -179,32 +182,34 @@ async def create_or_overwrite_workflow(
     if not re.match(r"^[\w\-]+$", workflow_id):
         return JSONResponse({"error": "invalid id"}, status_code=HTTP_400_BAD_REQUEST)

-    file_path = workflow_local_dir / f"{workflow_id}.json"
     workflow_local_dir.mkdir(parents=True, exist_ok=True)

     # If the body claims a different ID, treat that as a "rename".
     if request_body.get("id") and request_body.get("id") != workflow_id:
-        old_id = request_body["id"]
+        old_id: str = request_body["id"]
         if not re.match(r"^[\w\-]+$", old_id):
             return JSONResponse(
                 {"error": "invalid id"}, status_code=HTTP_400_BAD_REQUEST
             )

-        old_file_path = workflow_local_dir / f"{old_id}.json"
+        old_workflow_hash = sha256(old_id.encode()).hexdigest()
+        old_file_path = workflow_local_dir / f"{old_workflow_hash}.json"
         if old_file_path.exists():
             try:
                 old_file_path.unlink()
             except Exception as e:
-                logger.error(f"Error deleting {old_file_path}: {e}")
+                logger.error(f"Error deleting {old_id} from {old_file_path}: {e}")
                 return JSONResponse({"error": "unable to delete file"}, status_code=500)

-        request_body["id"] = workflow_id
+    request_body["id"] = workflow_id

+    workflow_hash = sha256(workflow_id.encode()).hexdigest()
+    file_path = workflow_local_dir / f"{workflow_hash}.json"
     try:
         with file_path.open("w", encoding="utf-8") as f:
             json.dump(request_body, f, indent=2)
     except Exception as e:
-        logger.error(f"Error writing JSON to {file_path}: {e}")
+        logger.error(f"Error writing JSON for {workflow_id} to {file_path}: {e}")
         return JSONResponse({"error": "unable to write file"}, status_code=500)

     return JSONResponse(
@@ -223,14 +228,15 @@ async def delete_workflow(workflow_id: str):
     if not re.match(r"^[\w\-]+$", workflow_id):
         return JSONResponse({"error": "invalid id"}, status_code=HTTP_400_BAD_REQUEST)

-    file_path = workflow_local_dir / f"{workflow_id}.json"
+    workflow_hash = sha256(workflow_id.encode()).hexdigest()
+    file_path = workflow_local_dir / f"{workflow_hash}.json"
     if not file_path.exists():
         return JSONResponse({"error": "not found"}, status_code=HTTP_404_NOT_FOUND)

     try:
         file_path.unlink()
     except Exception as e:
-        logger.error(f"Error deleting {file_path}: {e}")
+        logger.error(f"Error deleting {workflow_id} from {file_path}: {e}")
         return JSONResponse({"error": "unable to delete file"}, status_code=500)

     return JSONResponse(
@@ -253,7 +259,8 @@ async def builder_maybe_redirect(workflow_id: str):
     if not re.match(r"^[\w\-]+$", workflow_id):
         return RedirectResponse(url="/build", status_code=302)

-    file_path = workflow_local_dir / f"{workflow_id}.json"
+    workflow_hash = sha256(workflow_id.encode()).hexdigest()
+    file_path = workflow_local_dir / f"{workflow_hash}.json"
     if file_path.exists():
         return RedirectResponse(url=f"/build/edit/{workflow_id}", status_code=302)
     else:
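
Taken together, the hunks in this file apply one pattern to every route: workflow_id is validated against ^[\w\-]+$, and the on-disk filename is then derived from sha256(workflow_id.encode()).hexdigest() rather than from the raw id, so request input never reaches the filesystem path directly. Below is a minimal standalone sketch of that pattern for readers who want to try it in isolation; resolve_workflow_path is a hypothetical helper name used only for illustration, not a function added by this commit.

# Illustrative sketch only: mirrors the validate-then-hash pattern used in
# routes.py after this commit; resolve_workflow_path is a hypothetical helper.
import re
from hashlib import sha256
from pathlib import Path


def resolve_workflow_path(workflow_local_dir: Path, workflow_id: str) -> Path:
    # Reject ids that are not letters, digits, underscores, or hyphens.
    if not re.match(r"^[\w\-]+$", workflow_id):
        raise ValueError("invalid id")
    # Store under the SHA-256 of the id, so separators or ".." sequences in
    # user input can never shape the final path.
    workflow_hash = sha256(workflow_id.encode()).hexdigest()
    return workflow_local_dir / f"{workflow_hash}.json"


print(resolve_workflow_path(Path("/tmp/workflows"), "test-wf"))
# prints /tmp/workflows/<64-hex-digit hash>.json

One visible consequence shows up in the get_all_workflows hunk: with hashed filenames the id can no longer be read off the file stem, so it is recovered from the stored config instead (config_contents.get("id", json_file.stem)).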

tests/inference/hosted_platform_tests/conftest.py (+21 -8)

@@ -63,9 +63,10 @@ class PlatformEnvironment(Enum):
         "classification": "https://lambda-classification.staging.roboflow.com",
         "core-models": "https://3hkaykeh3j.execute-api.us-east-1.amazonaws.com",
     },
-
 }
-SERVICES_URLS[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = SERVICES_URLS[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST]
+SERVICES_URLS[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = SERVICES_URLS[
+    PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST
+]

 MODELS_TO_BE_USED = {
     PlatformEnvironment.ROBOFLOW_PLATFORM: {
@@ -85,15 +86,23 @@ class PlatformEnvironment(Enum):
         "yolov8n-pose-640": "microsoft-coco-pose/1",
     },
 }
-MODELS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = MODELS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_STAGING]
-MODELS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = MODELS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_PLATFORM]
+MODELS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = MODELS_TO_BE_USED[
+    PlatformEnvironment.ROBOFLOW_STAGING
+]
+MODELS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = MODELS_TO_BE_USED[
+    PlatformEnvironment.ROBOFLOW_PLATFORM
+]

 TARGET_PROJECTS_TO_BE_USED = {
     PlatformEnvironment.ROBOFLOW_PLATFORM: "active-learning-demo",
     PlatformEnvironment.ROBOFLOW_STAGING: "coin-counting",
 }
-TARGET_PROJECTS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = TARGET_PROJECTS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_STAGING]
-TARGET_PROJECTS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = TARGET_PROJECTS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_PLATFORM]
+TARGET_PROJECTS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = (
+    TARGET_PROJECTS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_STAGING]
+)
+TARGET_PROJECTS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = (
+    TARGET_PROJECTS_TO_BE_USED[PlatformEnvironment.ROBOFLOW_PLATFORM]
+)

 INTERFACE_DISCOVERING_WORKFLOW = {
     PlatformEnvironment.ROBOFLOW_STAGING: ("paul-guerrie", "staging-test-workflow"),
@@ -102,8 +111,12 @@ class PlatformEnvironment(Enum):
         "prod-test-workflow",
     ),
 }
-INTERFACE_DISCOVERING_WORKFLOW[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = INTERFACE_DISCOVERING_WORKFLOW[PlatformEnvironment.ROBOFLOW_STAGING]
-INTERFACE_DISCOVERING_WORKFLOW[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = INTERFACE_DISCOVERING_WORKFLOW[PlatformEnvironment.ROBOFLOW_PLATFORM]
+INTERFACE_DISCOVERING_WORKFLOW[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = (
+    INTERFACE_DISCOVERING_WORKFLOW[PlatformEnvironment.ROBOFLOW_STAGING]
+)
+INTERFACE_DISCOVERING_WORKFLOW[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = (
+    INTERFACE_DISCOVERING_WORKFLOW[PlatformEnvironment.ROBOFLOW_PLATFORM]
+)

 ROBOFLOW_API_KEY = os.environ["HOSTED_PLATFORM_TESTS_API_KEY"]
 OPENAI_KEY = os.getenv("OPENAI_KEY")

tests/inference/hosted_platform_tests/workflows_examples/roboflow_models/v1/test_workflow_for_classification.py (+10 -2)

@@ -124,8 +124,16 @@ def test_multi_class_classification_workflow(
     ],
     PlatformEnvironment.ROBOFLOW_PLATFORM: [{"dog"}, set()],
 }
-MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_STAGING]
-MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_PLATFORM]
+MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST
+] = MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_STAGING
+]
+MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST
+] = MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_PLATFORM
+]


 @pytest.mark.flaky(retries=4, delay=1)

tests/inference/hosted_platform_tests/workflows_examples/roboflow_models/v2/test_workflow_for_classification.py (+21 -4)

@@ -40,8 +40,17 @@
     PlatformEnvironment.ROBOFLOW_STAGING: [0.3667, 0.5917],
     PlatformEnvironment.ROBOFLOW_PLATFORM: [0.8252, 0.9962],
 }
-MULTI_CLASS_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = MULTI_CLASS_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_STAGING]
-MULTI_CLASS_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = MULTI_CLASS_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_PLATFORM]
+MULTI_CLASS_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST
+] = MULTI_CLASS_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_STAGING
+]
+MULTI_CLASS_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST
+] = MULTI_CLASS_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_PLATFORM
+]
+

 @pytest.mark.flaky(retries=4, delay=1)
 def test_multi_class_classification_workflow(
@@ -125,8 +134,16 @@ def test_multi_class_classification_workflow(
     ],
     PlatformEnvironment.ROBOFLOW_PLATFORM: [{"dog"}, set()],
 }
-MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST] = MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_STAGING]
-MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST] = MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[PlatformEnvironment.ROBOFLOW_PLATFORM]
+MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_STAGING_LOCALHOST
+] = MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_STAGING
+]
+MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_PLATFORM_LOCALHOST
+] = MULTI_LABEL_CLASSIFICATION_RESULTS_FOR_ENVIRONMENT[
+    PlatformEnvironment.ROBOFLOW_PLATFORM
+]


 @pytest.mark.flaky(retries=4, delay=1)

tests/inference/unit_tests/core/interfaces/http/test_builder.py (+17 -2)

@@ -39,6 +39,7 @@ def fake_read_text(self, encoding="utf-8"):
     app.include_router(routes.router, prefix="/build")
     return app

+
 def test_builder_html_injects_csrf(builder_app, builder_env_session):
     """
     Verify that the HTML response for GET /build has the CSRF token injected.
@@ -56,6 +57,7 @@ def test_builder_html_injects_csrf(builder_app, builder_env_session):
     token = token_match.group(1)
     assert len(token) == 32, "CSRF token should be 32 hex digits long"

+
 def test_builder_edit_injects_csrf(builder_app, builder_env_session):
     """
     Verify that GET /build/edit/{workflow_id} returns HTML with the CSRF token.
@@ -68,34 +70,41 @@ def test_builder_edit_injects_csrf(builder_app, builder_env_session):
     token = token_match.group(1)
     assert len(token) == 32, "CSRF token should be 32 hex digits long"

+
 def test_builder_redirect_trailing_slash(builder_app):
     """
     Verify that GET /build/ returns a redirect.
     """
     client = TestClient(builder_app)
     response = client.get("/build/", follow_redirects=False)
-    assert response.status_code == HTTP_302_FOUND, f"Expected 302, got {response.status_code}"
+    assert (
+        response.status_code == HTTP_302_FOUND
+    ), f"Expected 302, got {response.status_code}"
     assert response.headers["location"] == "/build"

+
 def test_api_get_all_workflows_unauthorized(builder_app):
     client = TestClient(builder_app)
     response = client.get("/build/api")
     assert response.status_code == HTTP_403_FORBIDDEN

+
 def test_api_get_workflow_invalid_id(builder_app):
     """
     Use an invalid workflow_id (with an illegal character) so that the route
     returns 400 instead of 404. (Using slashes would fail to match the route.)
     """
     client = TestClient(builder_app)
     from inference.core.interfaces.http.builder.routes import csrf
+
     invalid_id = "invalid$id"  # '$' is not allowed by the regex [\w\-]+
     response = client.get(
         f"/build/api/{invalid_id}",
         headers={"X-CSRF": csrf},
     )
     assert response.status_code == HTTP_400_BAD_REQUEST

+
 def test_api_create_and_read(builder_app):
     client = TestClient(builder_app)
     from inference.core.interfaces.http.builder.routes import csrf
@@ -114,20 +123,25 @@ def test_api_create_and_read(builder_app):
     data = get_resp.json()
     assert data["data"]["config"] == {"id": "test-wf", "stuff": 123}

+
 def test_fallback_redirect_invalid_id(builder_app):
     """
     With an invalid id containing slashes, the route will not match and yield a 404.
     """
     client = TestClient(builder_app)
     response = client.get("/build/../../etc/passwd", follow_redirects=False)
-    assert response.status_code == HTTP_404_NOT_FOUND, f"Expected 404, got {response.status_code}"
+    assert (
+        response.status_code == HTTP_404_NOT_FOUND
+    ), f"Expected 404, got {response.status_code}"
+

 def test_fallback_redirect_exists(builder_app):
     """
     Create a workflow via the JSON API and verify that GET /build/<id> redirects to /build/edit/<id>.
     """
     client = TestClient(builder_app)
     from inference.core.interfaces.http.builder.routes import csrf
+
     client.post(
         "/build/api/foobar",
         json={"id": "foobar"},
@@ -137,6 +151,7 @@ def test_fallback_redirect_exists(builder_app):
     assert response.status_code == HTTP_302_FOUND
     assert response.headers["location"] == "/build/edit/foobar"

+
 def test_fallback_redirect_not_exists(builder_app):
     """
     If the workflow file does not exist, GET /build/<id> should redirect to /build.
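
The edits to this test module are formatting-only (blank lines between functions, wrapped assert messages), so none of these tests asserts the new hash-based file naming directly. As a hedged, self-contained sketch that is not part of this commit, a check of that naming scheme could look like the following; stored_path simply repeats the sha256 expression now used in routes.py, and everything else is standard library.

# Hypothetical standalone check (not in the commit): the hash-based naming
# from routes.py keeps every stored file inside the workflow directory,
# even for hostile-looking ids (which the routes also reject via regex).
import tempfile
from hashlib import sha256
from pathlib import Path


def stored_path(workflow_local_dir: Path, workflow_id: str) -> Path:
    # Same expression the routes use after the fix.
    return workflow_local_dir / f"{sha256(workflow_id.encode()).hexdigest()}.json"


def check_hashed_names_stay_inside_directory() -> None:
    with tempfile.TemporaryDirectory() as tmp:
        workflow_dir = Path(tmp)
        for workflow_id in ["foobar", "test-wf", "../../etc/passwd"]:
            path = stored_path(workflow_dir, workflow_id)
            # resolve() would expose any ".." the name could smuggle in;
            # with a hex digest plus ".json" there is nothing to collapse.
            assert path.resolve().parent == workflow_dir.resolve()
            assert path.name.endswith(".json") and len(path.name) == 69  # 64 + 5


check_hashed_names_stay_inside_directory()
print("ok")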

tests/inference/unit_tests/core/interfaces/http/test_cors.py (+1)

@@ -9,6 +9,7 @@
 def homepage(request):
     return PlainTextResponse("Hello, world!")

+
 def create_app(match_paths=None, allow_origins=None):
     """
     Utility to create a Starlette test app with our custom PathAwareCORSMiddleware.

tests/workflows/integration_tests/execution/test_workflow_with_camera_calibration.py (+4 -4)

@@ -80,10 +80,10 @@ def test_workflow_with_camera_calibration(
     result = execution_engine.run(
         runtime_parameters={
             "images": [dogs_image],
-            "fx": 1.48052348e+03,
-            "fy": 1.62041507e+03,
-            "cx": 7.76228486e+02,
-            "cy": 5.09102914e+02,
+            "fx": 1.48052348e03,
+            "fy": 1.62041507e03,
+            "cx": 7.76228486e02,
+            "cy": 5.09102914e02,
             "k1": -0.67014685,
             "k2": 0.84140975,
             "k3": -0.40499778,

tests/workflows/integration_tests/execution/test_workflow_with_detections_merge.py (+10 -8; trailing whitespace stripped from blank lines, one assert reformatted, final newline added)

@@ -80,30 +80,32 @@ def test_detections_merge_workflow(
     assert isinstance(
         result[0]["result"], sv.Detections
     ), "Output must be instance of sv.Detections"
-
+
     # Check that we have exactly one merged detection
     assert len(result[0]["result"]) == 1, "Should have exactly one merged detection"
-
+
     # Check that the merged detection has all required fields
     assert "class_name" in result[0]["result"].data, "Should have class_name in data"
-    assert "detection_id" in result[0]["result"].data, "Should have detection_id in data"
-
+    assert (
+        "detection_id" in result[0]["result"].data
+    ), "Should have detection_id in data"
+
     # Check that the bounding box has reasonable dimensions
     merged_bbox = result[0]["result"].xyxy[0]
     image_height, image_width = dogs_image.shape[:2]
-
+
     # Check that coordinates are within image bounds
     assert 0 <= merged_bbox[0] <= image_width, "x1 should be within image bounds"
     assert 0 <= merged_bbox[1] <= image_height, "y1 should be within image bounds"
     assert 0 <= merged_bbox[2] <= image_width, "x2 should be within image bounds"
     assert 0 <= merged_bbox[3] <= image_height, "y2 should be within image bounds"
-
+
     # Check that the box has reasonable dimensions
     assert merged_bbox[2] > merged_bbox[0], "x2 should be greater than x1"
     assert merged_bbox[3] > merged_bbox[1], "y2 should be greater than y1"
-
+
     # Check that the box is large enough to likely contain the dogs
     box_width = merged_bbox[2] - merged_bbox[0]
     box_height = merged_bbox[3] - merged_bbox[1]
     assert box_width > 100, "Merged box should be reasonably wide"
-    assert box_height > 100, "Merged box should be reasonably tall"
+    assert box_height > 100, "Merged box should be reasonably tall"

tests/workflows/unit_tests/core_steps/transformations/test_detections_merge.py (+1 -1; the last line is rewritten to add the missing newline at end of file)

@@ -114,4 +114,4 @@ def test_detections_merge_block_empty_input() -> None:
     assert "predictions" in output
     assert len(output["predictions"]) == 0
     assert isinstance(output["predictions"].xyxy, np.ndarray)
-    assert output["predictions"].xyxy.shape == (0, 4)
+    assert output["predictions"].xyxy.shape == (0, 4)
