Skip to content

Commit fc87fa6

Browse files
authored
Merge pull request #176 from datajoint/fix/plat-341-filepath-records
PLAT-341: debug _DJConnector._fetch_records for filepath attributes
2 parents e722645 + f85cfce commit fc87fa6

File tree

6 files changed

+91
-24
lines changed

6 files changed

+91
-24
lines changed

.github/workflows/development.yaml

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -49,22 +49,22 @@ jobs:
4949
run: |
5050
export PHARUS_VERSION=$(cat pharus/version.py | grep -oP '\d+\.\d+\.\d+')
5151
export HOST_UID=$(id -u)
52-
docker-compose -f docker-compose-build.yaml up --exit-code-from pharus --build
52+
docker compose -f docker-compose-build.yaml up --exit-code-from pharus --build
5353
IMAGE=$(docker images --filter "reference=datajoint/pharus*" \
5454
--format "{{.Repository}}")
5555
TAG=$(docker images --filter "reference=datajoint/pharus*" --format "{{.Tag}}")
5656
docker save "${IMAGE}:${TAG}" | \
5757
gzip > "image-pharus-${PHARUS_VERSION}-py${PY_VER}-${DISTRO}.tar.gz"
5858
echo "PHARUS_VERSION=${PHARUS_VERSION}" >> $GITHUB_ENV
5959
- name: Add image artifact
60-
uses: actions/upload-artifact@v2
60+
uses: actions/upload-artifact@v4
6161
with:
6262
name: image-pharus-${{env.PHARUS_VERSION}}-py${{matrix.py_ver}}-${{matrix.distro}}
6363
path: "image-pharus-${{env.PHARUS_VERSION}}-py${{matrix.py_ver}}-\
6464
${{matrix.distro}}.tar.gz"
6565
retention-days: 1
6666
- name: Add pip artifacts
67-
uses: actions/upload-artifact@v2
67+
uses: actions/upload-artifact@v4
6868
with:
6969
name: pip-pharus-${{env.PHARUS_VERSION}}-py${{matrix.py_ver}}
7070
path: dist
@@ -95,14 +95,14 @@ jobs:
9595
PHARUS_VERSION=$(cat pharus/version.py | grep -oP '\d+\.\d+\.\d+')
9696
echo "PHARUS_VERSION=${PHARUS_VERSION}" >> $GITHUB_ENV
9797
- name: Fetch image artifact
98-
uses: actions/download-artifact@v2
98+
uses: actions/download-artifact@v4
9999
with:
100100
name: image-pharus-${{env.PHARUS_VERSION}}-py${{matrix.py_ver}}-${{matrix.distro}}
101101
- name: Run primary tests
102102
run: |
103103
export HOST_UID=$(id -u)
104104
docker load < "image-pharus-${PHARUS_VERSION}-py${PY_VER}-${DISTRO}.tar.gz"
105-
docker-compose -f docker-compose-test.yaml up --exit-code-from pharus
105+
docker compose -f docker-compose-test.yaml up --exit-code-from pharus
106106
publish-release:
107107
if: |
108108
github.event_name == 'push' &&
@@ -143,24 +143,24 @@ jobs:
143143
prerelease: ${{steps.changelog_reader.outputs.status == 'prereleased'}}
144144
draft: ${{steps.changelog_reader.outputs.status == 'unreleased'}}
145145
- name: Fetch image artifact
146-
uses: actions/download-artifact@v2
146+
uses: actions/download-artifact@v4
147147
with:
148148
name: image-pharus-${{env.PHARUS_VERSION}}-py3.8-alpine
149149
- name: Fetch 3.8 pip artifacts
150-
uses: actions/download-artifact@v2
150+
uses: actions/download-artifact@v4
151151
with:
152152
name: pip-pharus-${{env.PHARUS_VERSION}}-py3.8
153153
path: dist
154154
- name: Fetch 3.9 pip artifacts
155-
uses: actions/download-artifact@v2
155+
uses: actions/download-artifact@v4
156156
with:
157157
name: pip-pharus-${{env.PHARUS_VERSION}}-py3.8
158158
path: dist
159159
- name: Publish pip release
160160
run: |
161161
export HOST_UID=$(id -u)
162162
docker load < "image-pharus-${PHARUS_VERSION}-py3.8-alpine.tar.gz"
163-
docker-compose -f docker-compose-build.yaml run \
163+
docker compose -f docker-compose-build.yaml run \
164164
-e TWINE_USERNAME=${TWINE_USERNAME} -e TWINE_PASSWORD=${TWINE_PASSWORD} pharus \
165165
sh -lc "pip install twine && python -m twine upload dist/*"
166166
- name: Determine pip artifact paths
@@ -243,7 +243,7 @@ jobs:
243243
PHARUS_VERSION=$(cat pharus/version.py | grep -oP '\d+\.\d+\.\d+')
244244
echo "PHARUS_VERSION=${PHARUS_VERSION}" >> $GITHUB_ENV
245245
- name: Fetch image artifact
246-
uses: actions/download-artifact@v2
246+
uses: actions/download-artifact@v4
247247
with:
248248
name: image-pharus-${{env.PHARUS_VERSION}}-py${{matrix.py_ver}}-${{matrix.distro}}
249249
- name: Login to DockerHub
@@ -287,5 +287,5 @@ jobs:
287287
)
288288
needs: publish-release
289289
uses: datajoint/.github/.github/workflows/mkdocs_release.yaml@main
290-
permissions:
290+
permissions:
291291
contents: write

docker-compose-test.yaml

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,16 @@ services:
1515
- MYSQL_ROOT_PASSWORD=simple
1616
pharus:
1717
<<: *net
18+
stdin_open: true # docker run -i
19+
tty: true # docker run -t
1820
extends:
1921
file: ./docker-compose-build.yaml
2022
service: pharus
2123
environment:
2224
- FLASK_ENV=development # enables logging to console from Flask
23-
- TEST_DB_SERVER=db
24-
- TEST_DB_USER=root
25-
- TEST_DB_PASS=simple
25+
- TEST_DB_SERVER=${TEST_DB_SERVER:-db}
26+
- TEST_DB_USER=${TEST_DB_USER:-root}
27+
- TEST_DB_PASS=${TEST_DB_PASS:-simple}
2628
- AS_SCRIPT
2729
- PHARUS_SPEC_PATH=tests/init/test_dynamic_api_spec.yaml
2830
volumes:
@@ -39,12 +41,12 @@ services:
3941
flake8 $${PKG_DIR} --count --select=E9,F63,F7,F82 --show-source --statistics
4042
flake8 /main/tests --count --select=E9,F63,F7,F82 --show-source --statistics
4143
echo "------ UNIT TESTS ------"
42-
pytest -sv --cov-report term-missing --cov=pharus /main/tests
44+
pytest -sv --cov-report term-missing --cov=pharus /main/tests/
4345
echo "------ STYLE TESTS ------"
44-
black $${PKG_DIR} --check -v --extend-exclude "^.*dynamic_api.py$$"
46+
black $${PKG_DIR} --required-version '24.8.0' --check -v --extend-exclude "^.*dynamic_api.py$$"
4547
flake8 $${PKG_DIR} --count --max-complexity=20 --max-line-length=94 --statistics --exclude=*dynamic_api.py --ignore=W503,W605
46-
black /main/tests --check -v
47-
flake8 /main/tests --count --max-complexity=20 --max-line-length=94 --statistics --ignore=F401,F811,W503
48+
black /main/tests --required-version '24.8.0' --check -v
49+
flake8 /main/tests --count --max-complexity=20 --max-line-length=94 --statistics --ignore=F401,F811,W503,F403
4850
else
4951
echo "=== Running ==="
5052
echo "Please see 'docker-compose-test.yaml' for detail on running tests."

pharus/interface.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -176,7 +176,15 @@ def _fetch_records(
176176
# Loop through each attributes, append to the tuple_to_return with specific
177177
# modification based on data type
178178
for attribute_name, attribute_info in attributes.items():
179-
if not attribute_info.is_blob:
179+
if attribute_info.is_external:
180+
# Attribute is external type (filepath or attach),
181+
# thus fill it in string instead
182+
(
183+
row.append(non_blobs_row[attribute_name])
184+
if fetch_blobs
185+
else row.append("=FILE=")
186+
)
187+
elif not attribute_info.is_blob:
180188
if non_blobs_row[attribute_name] is None:
181189
# If it is none then just append None
182190
row.append(None)

tests/__init__.py

Lines changed: 21 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import pytest
2+
from typing import Optional
23
from pharus.dynamic_api import app
34
from uuid import UUID
45
from os import getenv
@@ -13,6 +14,25 @@
1314
SCHEMA_PREFIX = "test_"
1415

1516

17+
def get_db_creds():
18+
return dict(
19+
host=getenv("TEST_DB_SERVER"),
20+
user=getenv("TEST_DB_USER"),
21+
password=getenv("TEST_DB_PASS"),
22+
)
23+
24+
25+
def get_schema_as_vm(
26+
schema: str, connection: dj.Connection
27+
) -> Optional[dj.VirtualModule]:
28+
try:
29+
return dj.VirtualModule(schema, schema, connection=connection)
30+
except dj.DataJointError as e:
31+
if "has not yet been declared" in str(e):
32+
return None
33+
raise
34+
35+
1636
@pytest.fixture
1737
def client():
1838
"""REST client interface."""
@@ -59,11 +79,7 @@ def group1_token(client, connection):
5979
def connection():
6080
"""Root database connection."""
6181
dj.config["safemode"] = False
62-
connection = dj.Connection(
63-
host=getenv("TEST_DB_SERVER"),
64-
user=getenv("TEST_DB_USER"),
65-
password=getenv("TEST_DB_PASS"),
66-
)
82+
connection = dj.Connection(**get_db_creds())
6783
yield connection
6884
dj.config["safemode"] = True
6985
connection.close()

tests/conftest.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
from . import *

tests/test_interface.py

Lines changed: 40 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
import os
2+
from typing import Optional, Generator, Any
3+
import pytest
4+
import datajoint as dj
5+
from pharus.interface import _DJConnector as DJC
6+
from . import get_schema_as_vm, get_db_creds
7+
8+
9+
@pytest.fixture
10+
def nei_nienborg_model_labeledvideo_file(
11+
connection: dj.Connection,
12+
) -> Generator[Optional[dj.Table], Any, Any]:
13+
with pytest.MonkeyPatch.context() as mp:
14+
mp.setenv("DJ_SUPPORT_FILEPATH_MANAGEMENT", "TRUE")
15+
vm = get_schema_as_vm("nei_nienborg_model", connection)
16+
yield None if vm is None else vm.LabeledVideo.File
17+
18+
19+
class TestDJConnector:
20+
21+
def test_can_init(self):
22+
djc = DJC()
23+
assert djc is not None
24+
25+
@pytest.mark.skipif(
26+
(get_schema_as_vm("nei_nienborg_model", dj.conn(**get_db_creds())) is None),
27+
reason="Cannot access schema 'nei_nienborg_model' with these credentials",
28+
)
29+
def test_can_fetch_filepath_attrs(
30+
self, nei_nienborg_model_labeledvideo_file: dj.Table, connection
31+
):
32+
"""
33+
Tests _DJConnector._fetch_records for a table with a filepath attribute
34+
35+
https://datajoint.atlassian.net/browse/PLAT-341
36+
"""
37+
table = nei_nienborg_model_labeledvideo_file
38+
assert os.environ.get("DJ_SUPPORT_FILEPATH_MANAGEMENT").upper() == "TRUE"
39+
assert table.fetch("KEY", limit=1, download_path=None)
40+
assert table.fetch(limit=1)

0 commit comments

Comments (0)