diff --git a/.github/ISSUE_TEMPLATE/CONTRIBUTING.md b/.github/ISSUE_TEMPLATE/CONTRIBUTING.md new file mode 100644 index 0000000..f07cecb --- /dev/null +++ b/.github/ISSUE_TEMPLATE/CONTRIBUTING.md @@ -0,0 +1,3 @@ +# Contribution Guidelines + +This project follows the [DataJoint Contribution Guidelines](https://docs.datajoint.io/python/community/02-Contribute.html). Please reference the link for full details. \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 0000000..97e198e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,36 @@ +--- +name: Bug report +about: Create a report to help us improve +title: '' +labels: 'bug' +assignees: '' + +--- + +## Bug Report + +### Description +A clear and concise description of the overall operation that was intended to be performed and that resulted in an error. + +### Reproducibility +Include: +- OS (WIN | MACOS | Linux) +- Pharus Version +- MySQL Version +- MySQL Deployment Strategy (local-native | local-docker | remote) +- Minimum number of steps to reliably reproduce the issue +- Complete error stack as a result of evaluating the above steps + +### Expected Behavior +A clear and concise description of what you expected to happen. + +### Screenshots +If applicable, add screenshots to help explain your problem. + +### Additional Research and Context +Add any additional research or context that was conducted in creating this report. + +For example: +- Related GitHub issues and PR's either within this repository or in other relevant repositories. +- Specific links to specific lines or a focus within source code. +- Relevant summary of Maintainers development meetings, milestones, projects, etc. 
\ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 0000000..25302da --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: DataJoint Contribution Guideline + url: https://docs.datajoint.io/python/community/02-Contribute.html + about: Please make sure to review the DataJoint Contribution Guideline \ No newline at end of file diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 0000000..8ec3c5f --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,45 @@ +--- +name: Feature request +about: Suggest an idea for a new feature +title: '' +labels: 'enhancement' +assignees: '' + +--- + +## Feature Request + +### Problem +A clear and concise description of how this idea has manifested and the context. Elaborate on the need for this feature and/or what could be improved. Ex. I'm always frustrated when [...] + +### Requirements +A clear and concise description of the requirements to satisfy the new feature. Detail what you expect from a successful implementation of the feature. Ex. When using this feature, it should [...] + +### Justification +Provide the key benefits in making this a supported feature. Ex. Adding support for this feature would ensure [...] + +### Alternative Considerations +Do you currently have a work-around for this? Provide any alternative solutions or features you've considered. + +### Related Errors +Add any errors as a direct result of not exposing this feature. 
+ +Please include steps to reproduce provided errors as follows: +- OS (WIN | MACOS | Linux) +- Pharus Version +- MySQL Version +- MySQL Deployment Strategy (local-native | local-docker | remote) +- Minimum number of steps to reliably reproduce the issue +- Complete error stack as a result of evaluating the above steps + +### Screenshots +If applicable, add screenshots to help explain your feature. + +### Additional Research and Context +Add any additional research or context that was conducted in creating this feature request. + +For example: +- Related GitHub issues and PR's either within this repository or in other relevant repositories. +- Specific links to specific lines or a focus within source code. +- Relevant summary of Maintainers development meetings, milestones, projects, etc. +- Any additional supplemental web references or links that would further justify this feature request. \ No newline at end of file diff --git a/.github/workflows/development.yaml b/.github/workflows/development.yaml index 90c2343..af38318 100644 --- a/.github/workflows/development.yaml +++ b/.github/workflows/development.yaml @@ -27,6 +27,29 @@ jobs: echo "BODY=${BODY}" echo "PRERELEASE=${PRERELEASE}" echo "DRAFT=${DRAFT}" + build-docs: + needs: test-changelog + runs-on: ubuntu-latest + env: + DOCKER_CLIENT_TIMEOUT: "120" + COMPOSE_HTTP_TIMEOUT: "120" + PY_VER: "3.8" + IMAGE: "djtest" + DISTRO: "alpine" + steps: + - uses: actions/checkout@v2 + - name: Compile docs static artifacts + run: | + export PHARUS_VERSION=$(cat pharus/version.py | tail -1 | awk -F\' '{print $2}') + export HOST_UID=$(id -u) + docker-compose -f docker-compose-docs.yaml up --exit-code-from pharus --build + echo "PHARUS_VERSION=${PHARUS_VERSION}" >> $GITHUB_ENV + - name: Add docs static artifacts + uses: actions/upload-artifact@v2 + with: + name: docs-static-pharus-${{env.PHARUS_VERSION}} + path: docs/_build/html + retention-days: 1 build: needs: test-changelog runs-on: ubuntu-latest @@ -220,39 +243,51 @@ 
jobs: asset_name: "image-pharus-${{env.PHARUS_VERSION}}-py${{matrix.py_ver}}-\ ${{matrix.distro}}.tar.gz" asset_content_type: application/gzip - # publish-docs: - # if: github.event_name == 'push' && github.ref == 'refs/heads/master' - # runs-on: ubuntu-latest - # steps: - # - uses: actions/checkout@v1 - # - name: Build docs - # uses: ammaraskar/sphinx-action@master - # with: - # docs-folder: "docs/" - # # - name: Upload docs artifact - # # uses: actions/upload-artifact@v1 - # # with: - # # name: docs-html - # # path: docs/_build/html/ - # # retention-days: 1 - # - name: Commit documentation changes - # run: | - # git clone https://github.com/${GITHUB_REPOSITORY}.git \ - # --branch gh-pages --single-branch gh-pages - # rm -R gh-pages/* - # cp -r docs/_build/html/* gh-pages/ - # cp .gitignore gh-pages/ - # touch gh-pages/.nojekyll - # cd gh-pages - # git config --local user.email "action@github.com" - # git config --local user.name "GitHub Action" - # git add . --all - # git commit -m "Update documentation" -a || true - # # The above command will fail if no changes were present, so we ignore - # # the return code. 
- # - name: Push changes - # uses: ad-m/github-push-action@master - # with: - # branch: gh-pages - # directory: gh-pages - # github_token: ${{secrets.GITHUB_TOKEN}} \ No newline at end of file + publish-docs: + if: github.event_name == 'push' + needs: publish-release + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Determine package version + run: | + PHARUS_VERSION=$(cat pharus/version.py | tail -1 | awk -F\' '{print $2}') + echo "PHARUS_VERSION=${PHARUS_VERSION}" >> $GITHUB_ENV + - name: Fetch docs static artifacts + uses: actions/download-artifact@v2 + with: + name: docs-static-pharus-${{env.PHARUS_VERSION}} + path: docs/_build/html + - name: Commit documentation changes + run: | + git clone https://github.com/${GITHUB_REPOSITORY}.git \ + --branch gh-pages --single-branch gh-pages + rm -R gh-pages/* + cp -r docs/_build/html/* gh-pages/ + cp .gitignore gh-pages/ + touch gh-pages/.nojekyll + cd gh-pages + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add . --all + git commit -m "Update documentation" -a || true + # The above command will fail if no changes were present, so we ignore + # the return code. 
+ - name: Push changes + uses: ad-m/github-push-action@master + with: + branch: gh-pages + directory: gh-pages + github_token: ${{secrets.GITHUB_TOKEN}} + - name: Compress docs static site artifacts + run: zip -r docs-static-pharus-${PHARUS_VERSION}.zip docs/_build/html + - name: Upload docs static site to release + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + with: + upload_url: ${{needs.publish-release.outputs.release_upload_url}} + asset_path: docs-static-pharus-${{env.PHARUS_VERSION}}.zip + asset_name: docs-static-pharus-${{env.PHARUS_VERSION}}.zip + asset_content_type: application/zip + # fail_on_unmatched_files: true diff --git a/.gitignore b/.gitignore index a863bef..80d1755 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,6 @@ docker-compose.y*ml .coverage *.egg-info dist -build \ No newline at end of file +build +docs/_build +*.tar.gz diff --git a/CHANGELOG.md b/CHANGELOG.md index 4cd6220..04a2ee2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,11 +2,20 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. -## [Unreleased] +## [0.1.0b1] - 2021-03-11 ### Fixed - Fixed behavior where using list_table with a nonexistent schema_name creates it instead of returning an error message (#65) PR #63 +### Changed +- Contribution policy to follow directly the general DataJoint Contribution Guideline. (#91) PR #94 + +### Added +- Issue templates for bug reports and enhancement requests. PR #94 +- Docker environment for documentation build. (#92) PR #94 +- Add Sphinx-based documentation source and fix parsing issues. (#92) PR #94 +- GitHub Actions automation that, on release, publishes new docs to the release and GitHub Pages. 
(#92) PR #94 + ## [0.1.0b0] - 2021-02-26 ### Security @@ -41,6 +50,6 @@ Observes [Semantic Versioning](https://semver.org/spec/v2.0.0.html) standard and - Support for DataJoint attribute types: `varchar`, `int`, `float`, `datetime`, `date`, `time`, `decimal`, `uuid`. - Check dependency utility to determine child table references. -[Unreleased]: https://github.com/datajoint/pharus/compare/0.1.0b0...HEAD +[0.1.0b1]: https://github.com/datajoint/pharus/compare/0.1.0b0...0.1.0b1 [0.1.0b0]: https://github.com/datajoint/pharus/compare/0.1.0a5...0.1.0b0 [0.1.0a5]: https://github.com/datajoint/pharus/releases/tag/0.1.0a5 \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 5b31124..0000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,88 +0,0 @@ -# Contribution Guidelines - -Thank you for your interest in contributing! :handshake: - -To help keep everyone in alignment and coordinated in the community effort, we've created this document. It serves as the contribution guidelines that outline how open-source software development is to be conducted. Any software development that makes reference to this can be assumed to adopt the policies outlined below. We've structured it in FAQ format to make it easier to digest. Feel free review the questions below to determine any specific policy. - -## Which issue should I contribute towards? - -There is primarily 3 things to consider when looking to contribute. - -- **Availability**: Simply if anyone is currently working on a fix. This is represented by who is `assigned`. Issues that are `unassigned` mean that there is no one yet working on resolving the issue. -- **Specification**: In order for issues to be properly addressed, the requirements of satisfying and closing the issue should be clear. If it is not, a label will be added as `unspecified`. 
This could be due to more debug info being necessary, more details on intended behavior, or perhaps that further discussion is required to determine a good solution. Feel free to help us arrive at a proper specification. -- **Priority**: As a community, we work on a concerted effort to bring about the realization of the milestones. We utilize milestones as a planning tool to help focus a group of changes around a release. To determine the priority of issues, simply have a look at the next milestone that is expected to arrive. Therefore, each milestone following this can be understood as lower in priority respectively. Bear in mind that much like a hurricane forecast, the execution plan is much more likely to be accurate the closer to today's date as opposed to milestones further out. Extremely low priority issues are assigned to the `Backburner` milestone. Since it does not have a target date this indicates that they may be deferred indefinitely. Occasionally the maintainers will move issues from `Backburner` as it makes sense to address them within a release. Also, issues `unassigned` to a milestone can be understood as new issues which have not been triaged. - -After considering the above, you may comment on the issue you'd like to help fix and a maintainer will assign it to you. - -## What is the proper etiquette for proposing changes as contribution? - -What is generally expected from new contributions are the following: - -- Any proposed contributor changes should be introduced in the form of a pull request (PR) from their fork. -- Proper branch target specified. The following is generally the available branches that can be targeted: - - `master` or `main`: Represents the single source of truth and the latest in completed development. - - `pre`: Represents the source at the point of the last stable release. - For larger more involved changes, a maintainer may determine it best to create a feature-specific branch and adjust the PR accordingly. 
-- A summary description that describes the overall intent behind the PR. -- Proper links to the issue(s) that the PR serves to resolve. -- Newly introduced changes must pass any required checks. Typically as it relates to tests, this means: - 1. No syntax errors - 2. No integration errors - 3. No style errors e.g. PEP8, etc. - 4. Similar or better code coverage -- Additional documentation to reflect new feature or behavior introduced. -- Necessary updates to the changelog following [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) convention. -- A contributor should not approve or merge their own PR. -- Reviewer suggestions or feedback should not be directly committed to a branch on contributor's fork. A less intrusive way to collaborate would be for the reviewer to PR to the contributor's fork/branch that is associated with the main PR currently in review. - -Maintainers will also ensure that PR's have the appropriate assignment for reviewer, milestone, and project. - -## How can I track the progress of an issue that has been assigned? - -Since milestones represent the development plan, projects represent the actual execution. Projects are typically fixed-time sprints (1-2 weeks). A 'workable' number of issues that have been assigned to developers and assigned to the next milestone are selected and tracked in each project to provide greater granularity in the week-to-week progress. Automation is included observing the `Automated kanban with reviews` template. Maintainers will adjust the project assignment to reflect the order in which to resolve the milestone issues. - -## What is the release process? How do I know when my merged PR will officially make it into a release? - -Releases follow the standard definition of [semantic versioning](https://semver.org/spec/v2.0.0.html). Meaning: - -`MAJOR`.`MINOR`.`PATCH` - -1. `MAJOR` version when you make incompatible API changes, -2. 
`MINOR` version when you add functionality in a backwards compatible manner, and -3. `PATCH` version when you make backwards compatible bug fixes. - -Each release requires tagging the commit appropriately and is then issued in the normal medium for release e.g. PyPi, NPM, YARN, GitHub Release, etc. - -Minor releases are triggered when all the issues assigned to a milestone are resolved and closed. Patch releases are triggered periodically from `master` or `main` after a reasonable number of PR merges have come in. - -## I am not yet too comfortable contributing but would like to engage the community. What is the policy on community engagement? - -In order to follow the appropriate process and setting, please reference the following flow for your desired mode of engagement: - -### Generally, how do I perform **__________**? - -If the documentation does not provide clear enough instruction, please see StackOverflow posts related to the [datajoint](https://stackoverflow.com/questions/tagged/datajoint) tag or ask a new question tagging it appropriately. You may refer to our [datajoint tag wiki](https://stackoverflow.com/tags/datajoint/info) for more details on its proper use. - -### I just encountered this error, how can I resolve it? - -Please see StackOverflow posts related to the [datajoint](https://stackoverflow.com/questions/tagged/datajoint) tag or ask a new question tagging it appropriately. You may refer to our [datajoint tag wiki](https://stackoverflow.com/tags/datajoint/info) for more details on its proper use. - -### I just encountered this error and I am sure it is a bug, how do I report it? - -Please file it under the issue tracker associated with the open-source software. - -### I have an idea or new feature request, how do I submit it? - -Please file it under the issue tracker associated with the open-source software. - -### I am curious why the maintainers choose to **__________**? i.e. 
questions that are 'opinionated' in nature with answers that some might disagree. - - Please join the community on the [DataJoint Slack](https://join.slack.com/t/datajoint/shared_invite/enQtMjkwNjQxMjI5MDk0LTQ3ZjFiZmNmNGVkYWFkYjgwYjdhNTBlZTBmMWEyZDc2NzZlYTBjOTNmYzYwOWRmOGFmN2MyYzU0OWQ0MWZiYTE) and ask on the most relevant channel. There, you may engage directly with the maintainers for proper discourse. - -### What is the timeline or roadmap for the release of certain supported features? - -Please refer to milestones and projects associated with the open-source software. - -### I need urgent help best suited for live debugging, how can I reach out directly? - - Please join the community on the [DataJoint Slack](https://join.slack.com/t/datajoint/shared_invite/enQtMjkwNjQxMjI5MDk0LTQ3ZjFiZmNmNGVkYWFkYjgwYjdhNTBlZTBmMWEyZDc2NzZlYTBjOTNmYzYwOWRmOGFmN2MyYzU0OWQ0MWZiYTE) and ask on the most relevant channel. Please bear in mind that as open-source community software, availability of the maintainers might be limited. diff --git a/Dockerfile b/Dockerfile index 2dee20b..c1de4bd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ ARG PY_VER ARG DISTRO ARG IMAGE FROM datajoint/${IMAGE}:py${PY_VER}-${DISTRO} -COPY --chown=dja:anaconda ./README.md ./requirements.txt ./setup.py \ +COPY --chown=dja:anaconda ./README.rst ./requirements.txt ./setup.py \ /main/ COPY --chown=dja:anaconda ./pharus/*.py /main/pharus/ RUN \ diff --git a/README.md b/README.md deleted file mode 100644 index 7269661..0000000 --- a/README.md +++ /dev/null @@ -1,59 +0,0 @@ -
- 👷‍♀️ Under Construction 👷 - construction_fig -

-
- -> ⚠️ The Pharus project is still early in its life and the maintainers are currently actively developing with a priority of addressing first critical issues directly related to the deliveries of [Alpha](https://github.com/datajoint/pharus/milestone/1) and [Beta](https://github.com/datajoint/pharus/milestone/2) milestones. Please be advised that while working through our milestones, we may restructure/refactor the codebase without warning until we issue our [Official Release](https://github.com/datajoint/pharus/milestone/3) currently planned as `0.1.0` on `2021-03-31`. - -# Pharus - -A generic REST API server backend for DataJoint pipelines built on top of `flask`, `datajoint`, and `pyjwt`. - -Usage and API documentation currently available within method docstrings. See Python's `help(...)` utility. - -## Requirements for Preferred Setup - -- [Docker](https://docs.docker.com/get-docker/ ) -- [Docker Compose](https://docs.docker.com/compose/install/) - -## Run Locally w/ Docker - -- Copy a `*-docker-compose.yaml` file corresponding to your usage to `docker-compose.yaml`. This file is untracked so feel free to modify as necessary. Idea is to commit anything generic but system/setup dependent should go on 'your' version i.e. local UID/GID, etc. -- Check the first comment which will provide the best instruction on how to start the service; yes, it is a bit long. Note: Any of the keyword arguments prepended to the `docker-compose` command can be safely moved into a dedicated `.env` and read automatically if they are not evaluated i.e. `$(...)`. 
Below is a brief description of the non-evaluated environment variables: - -```shell -PY_VER=3.8 # Python version: 3.6|3.7|3.8 -IMAGE=djtest # Image type: djbase|djtest|djlab|djlabhub -DISTRO=alpine # Distribution: alpine|debian -AS_SCRIPT= # If 'TRUE', will not keep container alive but run tests and exit -``` - -> ⚠️ Deployment options currently being considered are [Docker Compose](https://docs.docker.com/compose/install/) and [Kubernetes](https://kubernetes.io/docs/tutorials/kubernetes-basics/). - -## Run Locally w/ Python - -- Set environment variables for port assignment (`PHARUS_PORT`, defaults to 5000) and API route prefix (`PHARUS_PREFIX` e.g. `/api`, defaults to empty string). -- For development, use CLI command `pharus`. This method supports hot-reloading so probably best coupled with `pip install -e ...`. -- For production, use `gunicorn --bind 0.0.0.0:${PHARUS_PORT} pharus.server:app`. - -## Run Tests for Development w/ Pytest and Flake8 - -- Set `pharus` testing environment variables: - ```shell - PKG_DIR=/opt/conda/lib/python3.8/site-packages/pharus # path to pharus installation - TEST_DB_SERVER=example.com:3306 # testing db server address - TEST_DB_USER=root # testing db server user (needs DDL privilege) - TEST_DB_PASS=unsecure # testing db server password - ``` -- For syntax tests, run `flake8 ${PKG_DIR} --count --select=E9,F63,F7,F82 --show-source --statistics` -- For pytest integration tests, run `pytest -sv --cov-report term-missing --cov=${PKG_DIR} /main/tests` -- For style tests, run `flake8 ${PKG_DIR} --count --max-complexity=20 --max-line-length=95 --statistics` - -## References - -- DataJoint LabBook (a companion frontend) - - https://github.com/datajoint/datajoint-labbook -- Under construction image credits - - https://www.pngfind.com/mpng/ooiim_under-construction-tape-png-under-construction-transparent-png/ diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..9c1df67 --- /dev/null +++ b/README.rst @@ -0,0 +1,104 @@ 
+User Documentation +================== + +.. warning:: + + The Pharus project is still early in its life and the maintainers are currently actively developing with a priority of addressing first critical issues directly related to the deliveries of `Alpha `_ and `Beta `_ milestones. Please be advised that while working through our milestones, we may restructure/refactor the codebase without warning until we issue our `Official Release `_ currently planned as ``0.1.0`` on ``2021-03-31``. + +``pharus`` is a generic REST API server backend for DataJoint pipelines built on top of ``flask``, ``datajoint``, and ``pyjwt``. + +- `Documentation `_ +- `Package `_ +- `Source `_ + +Requirements for Preferred Setup +-------------------------------- + +- `Docker `_ +- `Docker Compose `_ + +Run Locally w/ Docker +--------------------- + +- Copy a ``*-docker-compose.yaml`` file corresponding to your usage to ``docker-compose.yaml``. This file is untracked so feel free to modify as necessary. Idea is to commit anything generic but system/setup dependent should go on 'your' version i.e. local UID/GID, etc. +- Check the first comment which will provide the best instruction on how to start the service; yes, it is a bit long. Note: Any of the keyword arguments prepended to the ``docker-compose`` command can be safely moved into a dedicated ``.env`` and read automatically if they are not evaluated i.e. ``$(...)``. Below is a brief description of the non-evaluated environment variables: + + .. code-block:: bash + + PY_VER=3.8 # Python version: 3.6|3.7|3.8 + IMAGE=djtest # Image type: djbase|djtest|djlab|djlabhub + DISTRO=alpine # Distribution: alpine|debian + AS_SCRIPT= # If 'TRUE', will not keep container alive but run tests and exit + +.. note:: + + Deployment options currently being considered are `Docker Compose `_ and `Kubernetes `_. 
+ +Run Locally w/ Python +--------------------- + +- Set environment variables for port assignment (``PHARUS_PORT``, defaults to 5000) and API route prefix (``PHARUS_PREFIX`` e.g. ``/api``, defaults to empty string). +- For development, use CLI command ``pharus``. This method supports hot-reloading so probably best coupled with ``pip install -e ...``. +- For production, use ``gunicorn --bind 0.0.0.0:${PHARUS_PORT} pharus.server:app``. + +Run Tests for Development w/ Pytest and Flake8 +---------------------------------------------- + +- Set ``pharus`` testing environment variables: + + .. code-block:: bash + + PKG_DIR=/opt/conda/lib/python3.8/site-packages/pharus # path to pharus installation + TEST_DB_SERVER=example.com:3306 # testing db server address + TEST_DB_USER=root # testing db server user (needs DDL privilege) + TEST_DB_PASS=unsecure # testing db server password + +- For syntax tests, run ``flake8 ${PKG_DIR} --count --select=E9,F63,F7,F82 --show-source --statistics`` +- For pytest integration tests, run ``pytest -sv --cov-report term-missing --cov=${PKG_DIR} /main/tests`` +- For style tests, run ``flake8 ${PKG_DIR} --count --max-complexity=20 --max-line-length=95 --statistics`` + +Creating Sphinx Documentation from Scratch +------------------------------------------ + +Recommend the follow to be ran within the ``pharus`` container in ``docs`` Docker Compose environment. + +- Run the following commands and complete the prompts as requested. + + .. code-block:: bash + + mkdir docs + cd docs + sphinx-quickstart + +- In ``docs/conf.py`` add to the beginning: + + .. code-block:: python + + import os + import sys + sys.path.insert(0, os.path.abspath('..')) + +- In ``docs/conf.py:extensions`` append ``['sphinx.ext.autodoc', 'sphinxcontrib.httpdomain']``. See ``requirements_docs.txt`` and ``docker-compose-docs.yaml`` for details on documentation dependencies. +- Run the following to automatically generate the API docs: + + .. 
code-block:: bash + + sphinx-apidoc -o . .. ../tests/* ../setup.py + +- Add ``modules`` within the ``toctree`` directive in ``index.rst``. +- Build the docs by running: + + .. code-block:: bash + + make html + +References +---------- + +- DataJoint LabBook (a companion frontend) + + - https://github.com/datajoint/datajoint-labbook + +- Python Tutorial for Flask, Swagger, and Automated docs + + - https://realpython.com/flask-connexion-rest-api/#reader-comments diff --git a/docker-compose-deploy.yaml b/docker-compose-deploy.yaml index fc3a83e..9ebe958 100644 --- a/docker-compose-deploy.yaml +++ b/docker-compose-deploy.yaml @@ -1,5 +1,5 @@ -# PHARUS_VERSION=0.1.0b0 docker-compose -f docker-compose-deploy.yaml pull -# PHARUS_VERSION=0.1.0b0 docker-compose -f docker-compose-deploy.yaml up -d +# PHARUS_VERSION=0.1.0b1 docker-compose -f docker-compose-deploy.yaml pull +# PHARUS_VERSION=0.1.0b1 docker-compose -f docker-compose-deploy.yaml up -d # # Intended for production deployment. # Note: You must run both commands above for minimal outage diff --git a/docker-compose-docs.yaml b/docker-compose-docs.yaml new file mode 100644 index 0000000..65d8085 --- /dev/null +++ b/docker-compose-docs.yaml @@ -0,0 +1,21 @@ +# PY_VER=3.8 IMAGE=djtest DISTRO=alpine HOST_UID=$(id -u) docker-compose -f docker-compose-docs.yaml up --exit-code-from pharus --build +# +# Used to build documentation artifacts. +version: "2.4" +services: + pharus: + image: datajoint/${IMAGE}:py${PY_VER}-${DISTRO} + user: ${HOST_UID}:anaconda + volumes: + - .:/main + - ./requirements_docs.txt:/tmp/pip_requirements.txt + command: + - sh + - -lc + - | + set -e + cd docs + echo "make" > "$$APK_REQUIREMENTS" + /entrypoint.sh echo done. 
+ rm "$$APK_REQUIREMENTS" + make html diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..eef0ba2 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,51 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + + +# -- Project information ----------------------------------------------------- + +project = 'Pharus' +copyright = '2021, DataJoint Contributors' +author = 'DataJoint Contributors' + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ['sphinx.ext.autodoc', 'sphinxcontrib.httpdomain'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/dev_notes.rst b/docs/dev_notes.rst new file mode 100644 index 0000000..c229146 --- /dev/null +++ b/docs/dev_notes.rst @@ -0,0 +1,4 @@ +General Notes +============= + +Dev docs TBD. diff --git a/docs/developer.rst b/docs/developer.rst new file mode 100644 index 0000000..e58ad1a --- /dev/null +++ b/docs/developer.rst @@ -0,0 +1,8 @@ +Developer Documentation +======================= + +.. toctree:: + :maxdepth: 2 + + dev_notes + modules diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..e77e1b7 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,21 @@ +.. Pharus documentation master file, created by + sphinx-quickstart on Tue Mar 2 17:17:52 2021. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to Pharus's documentation! +================================== + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents: + + user + developer + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/modules.rst b/docs/modules.rst new file mode 100644 index 0000000..cb286f6 --- /dev/null +++ b/docs/modules.rst @@ -0,0 +1,7 @@ +API +=== + +.. toctree:: + :maxdepth: 4 + + pharus diff --git a/docs/pharus.rst b/docs/pharus.rst new file mode 100644 index 0000000..b20d65c --- /dev/null +++ b/docs/pharus.rst @@ -0,0 +1,45 @@ +pharus package +============== + +.. Submodules +.. ---------- + +pharus.error module +------------------- + +.. automodule:: pharus.error + :members: + :undoc-members: + :show-inheritance: + +pharus.interface module +----------------------- + +.. automodule:: pharus.interface + :members: + :undoc-members: + :show-inheritance: + +pharus.server module +-------------------- + +.. automodule:: pharus.server + :members: + :undoc-members: + :show-inheritance: + +pharus.version module +--------------------- + +.. automodule:: pharus.version + :members: + :undoc-members: + :show-inheritance: + +.. Module contents +.. --------------- + +.. .. automodule:: pharus +.. :members: +.. :undoc-members: +.. 
:show-inheritance: diff --git a/docs/user.rst b/docs/user.rst new file mode 120000 index 0000000..89a0106 --- /dev/null +++ b/docs/user.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/pharus/error.py b/pharus/error.py index 2abd2a8..50e132c 100644 --- a/pharus/error.py +++ b/pharus/error.py @@ -2,12 +2,15 @@ class UnsupportedTableType(Exception): + """Exception raised on unsupported table types.""" pass class InvalidRestriction(Exception): + """Exception raised when restrictions result in no records when expected at least one.""" pass class InvalidDeleteRequest(Exception): + """Exception raised when attempting to delete >1 or <1 records.""" pass diff --git a/pharus/interface.py b/pharus/interface.py index 12b250c..d9a02a2 100644 --- a/pharus/interface.py +++ b/pharus/interface.py @@ -1,6 +1,8 @@ """Library for interfaces into DataJoint pipelines.""" import datajoint as dj from datajoint.utils import to_camel_case +from datajoint.user_tables import UserTable +from datajoint import VirtualModule import datetime import numpy as np from functools import reduce @@ -11,21 +13,21 @@ class DJConnector(): - """ - Primary connector that communicates with a DataJoint database server. - """ + """Primary connector that communicates with a DataJoint database server.""" @staticmethod - def attempt_login(database_address: str, username: str, password: str): + def attempt_login(database_address: str, username: str, password: str) -> dict: """ - Attempts to authenticate against database with given username and address + Attempts to authenticate against database with given username and address. 
+ :param database_address: Address of database :type database_address: str :param username: Username of user :type username: str :param password: Password of user :type password: str - :return: Dictionary with keys: result(True|False), and error (if applicable) + :return: Dictionary with keys: result (``True`` | ``False``), and error (if + applicable) :rtype: dict """ dj.config['database.host'] = database_address @@ -37,13 +39,15 @@ def attempt_login(database_address: str, username: str, password: str): return dict(result=True) @staticmethod - def list_schemas(jwt_payload: dict): + def list_schemas(jwt_payload: dict) -> list: """ - List all schemas under the database - :param jwt_payload: Dictionary containing databaseAddress, username and password + List all schemas under the database. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict - :return: List of schemas names in alphabetical order excluding information_schema + :return: List of schemas names in alphabetical order (excludes ``information_schema``, + ``sys``, ``performance_schema``, ``mysql``) :rtype: list """ DJConnector.set_datajoint_config(jwt_payload) @@ -56,15 +60,17 @@ def list_schemas(jwt_payload: dict): """)] @staticmethod - def list_tables(jwt_payload: dict, schema_name: str): + def list_tables(jwt_payload: dict, schema_name: str) -> dict: """ - List all tables and their type give a schema - :param jwt_payload: Dictionary containing databaseAddress, username and password + List all tables and their type given a schema. 
+ + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict :param schema_name: Name of schema to list all tables from :type schema_name: str - :return: Contains a key for a each table type and it corressponding table names + :return: Contains a key for each table type where values are the respective list of + table names :rtype: dict """ DJConnector.set_datajoint_config(jwt_payload) @@ -104,23 +110,24 @@ def fetch_tuples(jwt_payload: dict, schema_name: str, table_name: str, restriction: list = [], limit: int = 1000, page: int = 1, order=['KEY ASC']) -> tuple: """ - Get records as tuples from table - :param jwt_payload: Dictionary containing databaseAddress, username and password + Get records as tuples from table. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict :param schema_name: Name of schema to list all tables from :type schema_name: str :param table_name: Table name under the given schema; must be in camel case :type table_name: str - :param restriction: Sequence of filter cards with attributeName, operation, value - defined, defaults to [] + :param restriction: Sequence of filters as ``dict`` with ``attributeName``, + ``operation``, ``value`` keys defined, defaults to ``[]`` :type restriction: list, optional - :param limit: Max number of records to return, defaults to 1000 + :param limit: Max number of records to return, defaults to ``1000`` :type limit: int, optional - :param page: Page number to return, defaults to 1 + :param page: Page number to return, defaults to ``1`` :type page: int, optional - :param order: Sequence to order records, defaults to ['KEY ASC']. - See :class:`datajoint.fetch.Fetch` for more info. + :param order: Sequence to order records, defaults to ``['KEY ASC']``. See + :class:`~datajoint.fetch.Fetch` for more info. 
:type order: list, optional :return: Records in dict form and the total number of records that can be paged :rtype: tuple @@ -201,18 +208,20 @@ def filter_to_restriction(attribute_filter: dict) -> str: return rows, len(query) @staticmethod - def get_table_attributes(jwt_payload: dict, schema_name: str, table_name: str): + def get_table_attributes(jwt_payload: dict, schema_name: str, table_name: str) -> dict: """ - Method to get primary and secondary attributes of a table - :param jwt_payload: Dictionary containing databaseAddress, username and password + Method to get primary and secondary attributes of a table. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict :param schema_name: Name of schema to list all tables from :type schema_name: str :param table_name: Table name under the given schema; must be in camel case :type table_name: str - :return: Dict of primary, secondary attributes and with metadata: attribute_name, - type, nullable, default, autoincrement. + :return: Dict with keys ``primary_attributes``, ``secondary_attributes`` containing a + ``list`` of ``tuples`` specifying: ``attribute_name``, ``type``, ``nullable``, + ``default``, ``autoincrement``. :rtype: dict """ DJConnector.set_datajoint_config(jwt_payload) @@ -244,17 +253,18 @@ def get_table_attributes(jwt_payload: dict, schema_name: str, table_name: str): return table_attributes @staticmethod - def get_table_definition(jwt_payload: dict, schema_name: str, table_name: str): + def get_table_definition(jwt_payload: dict, schema_name: str, table_name: str) -> str: """ - Get the table definition - :param jwt_payload: Dictionary containing databaseAddress, username and password + Get the table definition. 
+ + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict :param schema_name: Name of schema to list all tables from :type schema_name: str :param table_name: Table name under the given schema; must be in camel case :type table_name: str - :return: definition of the table + :return: Definition of the table :rtype: str """ DJConnector.set_datajoint_config(jwt_payload) @@ -267,8 +277,9 @@ def get_table_definition(jwt_payload: dict, schema_name: str, table_name: str): def insert_tuple(jwt_payload: dict, schema_name: str, table_name: str, tuple_to_insert: dict): """ - Insert record as tuple into table - :param jwt_payload: Dictionary containing databaseAddress, username and password + Insert record as tuple into table. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict :param schema_name: Name of schema to list all tables from @@ -287,8 +298,10 @@ def insert_tuple(jwt_payload: dict, schema_name: str, table_name: str, def record_dependency(jwt_payload: dict, schema_name: str, table_name: str, primary_restriction: dict) -> list: """ - Return summary of dependencies associated with a restricted table - :param jwt_payload: Dictionary containing databaseAddress, username and password + Return summary of dependencies associated with a restricted table. Will only show + dependencies that user has access to. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict :param schema_name: Name of schema @@ -297,8 +310,7 @@ def record_dependency(jwt_payload: dict, schema_name: str, table_name: str, :type table_name: str :param primary_restriction: Restriction to be applied to table :type primary_restriction: dict - :return: Tables that are dependant on specific records. Includes accessibility and, - if accessible, how many rows are affected. + :return: Tables that are dependent on specific records. 
:rtype: list """ DJConnector.set_datajoint_config(jwt_payload) @@ -314,8 +326,9 @@ def record_dependency(jwt_payload: dict, schema_name: str, table_name: str, def update_tuple(jwt_payload: dict, schema_name: str, table_name: str, tuple_to_update: dict): """ - Update record as tuple into table - :param jwt_payload: Dictionary containing databaseAddress, username and password + Update record as tuple into table. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict :param schema_name: Name of schema to list all tables from @@ -334,8 +347,10 @@ def update_tuple(jwt_payload: dict, schema_name: str, table_name: str, def delete_tuple(jwt_payload: dict, schema_name: str, table_name: str, tuple_to_restrict_by: dict, cascade: bool = False): """ - Delete a specific record based on the restriction given (Can only delete 1 at a time) - :param jwt_payload: Dictionary containing databaseAddress, username and password + Delete a specific record based on the restriction given (supports only deleting one at + a time). 
+ + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict :param schema_name: Name of schema to list all tables from @@ -344,7 +359,7 @@ def delete_tuple(jwt_payload: dict, schema_name: str, table_name: str, :type table_name: str :param tuple_to_restrict_by: Record to restrict the table by to delete :type tuple_to_restrict_by: dict - :param cascade: Allow for cascading delete, defaults to False + :param cascade: Allow for cascading delete, defaults to ``False`` :type cascade: bool """ DJConnector.set_datajoint_config(jwt_payload) @@ -375,13 +390,16 @@ def delete_tuple(jwt_payload: dict, schema_name: str, table_name: str, tuple_to_delete.delete(safemode=False) if cascade else tuple_to_delete.delete_quick() @staticmethod - def get_table_object(schema_virtual_module, table_name: str): + def get_table_object(schema_virtual_module: VirtualModule, table_name: str) -> UserTable: """ - Helper method for getting the table object based on the table_name provided - :param schema_virtual_module: dj.VirtualModule for accesing the schema - :type schema_virtual_module: dj.VirtualModule - :param table_name: name of the table, for part it should be parent.part + Helper method for getting the table object based on the table name provided. + + :param schema_virtual_module: Virtual module for accesing the schema + :type schema_virtual_module: :class:`~datajoint.schemas.VirtualModule` + :param table_name: Name of the table; for part it should be ``Parent.Part`` :type table_name: str + :return: DataJoint table object. + :rtype: :class:`~datajoint.user_tables.UserTable` """ # Split the table name by '.' 
for dealing with part tables table_name_parts = table_name.split('.') @@ -394,8 +412,9 @@ def get_table_object(schema_virtual_module, table_name: str): @staticmethod def set_datajoint_config(jwt_payload: dict): """ - Method to set credentials for database - :param jwt_payload: Dictionary containing databaseAddress, username and password + Method to set credentials for database. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings :type jwt_payload: dict """ diff --git a/pharus/server.py b/pharus/server.py index a6168d9..d46b019 100644 --- a/pharus/server.py +++ b/pharus/server.py @@ -3,6 +3,7 @@ from .interface import DJConnector from . import __version__ as version from typing import Callable +from functools import wraps # Crypto libaries from cryptography.hazmat.primitives import serialization as crypto_serialization @@ -35,14 +36,17 @@ ).decode() -def protected_route(function: Callable): +def protected_route(function: Callable) -> Callable: """ - Protected route function decorator which authenticates requests + Protected route function decorator which authenticates requests. + :param function: Function to decorate, typically routes - :type function: :class:`typing.Callable` - :return: Function output if jwt authecation is successful, otherwise return error message - :rtype: class:`typing.Callable` + :type function: :class:`~typing.Callable` + :return: Function output if jwt authetication is successful, otherwise return error + message + :rtype: :class:`~typing.Callable` """ + @wraps(function) def wrapper(): try: jwt_payload = jwt.decode(request.headers.get('Authorization').split()[1], @@ -56,39 +60,109 @@ def wrapper(): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/version") -def api_version(): +def api_version() -> str: """ - Route to check if the server is alive or not + Handler for ``/version`` route. + :return: API version :rtype: str + + .. 
http:get:: /version + + Route to check server health returning the API version. + + **Example request**: + + .. sourcecode:: http + + GET /version HTTP/1.1 + Host: fakeservices.datajoint.io + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/plain + + 0.1.0 + + :statuscode 200: No error. """ return version @app.route(f"{environ.get('PHARUS_PREFIX', '')}/login", methods=['POST']) -def login(): +def login() -> dict: """ - *WARNING*: Currently, this implementation exposes user database credentials as plain text - in POST body once and stores it within a bearer token as Base64 encoded for subsequent - requests. That is how the server is able to submit queries on user's behalf. Due to - this, it is required that remote hosts expose the server only under HTTPS to ensure - end-to-end encryption. Sending passwords in plain text over HTTPS in POST request body - is common and utilized by companies such as GitHub (2021) and Chase Bank (2021). On - server side, there is no caching, logging, or storage of received passwords or tokens - and thus available only briefly in memory. This means the primary vulnerable point is - client side. Users should be responsible with their passwords and bearer tokens - treating them as one-in-the-same. Be aware that if your client system happens to be - compromised, a bad actor could monitor your outgoing network requests and capture/log - your credentials. However, in such a terrible scenario, a bad actor would not only - collect credentials for your DataJoint database but also other sites such as - github.com, chase.com, etc. Please be responsible and vigilant with credentials and - tokens on client side systems. Improvements to the above strategy is currently being - tracked in https://github.com/datajoint/pharus/issues/82. - Login route which uses DataJoint database server login. 
Expects: - (html:POST:body): json with keys - {databaseAddress: string, username: string, password: string} - :return: Function output if jwt authecation is successful, otherwise return error message + **WARNING**: Currently, this implementation exposes user database credentials as plain + text in POST body once and stores it within a bearer token as Base64 encoded for + subsequent requests. That is how the server is able to submit queries on user's behalf. + Due to this, it is required that remote hosts expose the server only under HTTPS to ensure + end-to-end encryption. Sending passwords in plain text over HTTPS in POST request body is + common and utilized by companies such as GitHub (2021) and Chase Bank (2021). On server + side, there is no caching, logging, or storage of received passwords or tokens and thus + available only briefly in memory. This means the primary vulnerable point is client side. + Users should be responsible with their passwords and bearer tokens treating them as + one-in-the-same. Be aware that if your client system happens to be compromised, a bad + actor could monitor your outgoing network requests and capture/log your credentials. + However, in such a terrible scenario, a bad actor would not only collect credentials for + your DataJoint database but also other sites such as github.com, chase.com, etc. Please be + responsible and vigilant with credentials and tokens on client side systems. Improvements + to the above strategy is currently being tracked in + https://github.com/datajoint/pharus/issues/82. + + Handler for ``/login`` route. + + :return: Function output is encoded jwt if successful, otherwise return error message :rtype: dict + + .. http:post:: /login + + Route to get authentication token. + + **Example request**: + + .. 
sourcecode:: http + + POST /login HTTP/1.1 + Host: fakeservices.datajoint.io + Accept: application/json + + { + "databaseAddress": "tutorial-db.datajoint.io", + "username": "user1", + "password": "password1" + } + + **Example successful response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "jwt": "" + } + + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :resheader Content-Type: text/plain, application/json + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. """ # Check if request.json has the correct fields if not request.json.keys() >= {'databaseAddress', 'username', 'password'}: @@ -109,15 +183,56 @@ def login(): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/list_schemas", methods=['GET']) @protected_route -def list_schemas(jwt_payload: dict): +def list_schemas(jwt_payload: dict) -> dict: """ - API route for fetching schema. Expects: - (html:GET:Authorization): Must include in format of: bearer - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. + Handler for ``/list_schemas`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. :type jwt_payload: dict - :return: If sucessfuly sends back a list of schemas names otherwise returns error + :return: If successful then sends back a list of schemas names otherwise returns error. :rtype: dict + + .. http:get:: /list_schemas + + Route to get list of schemas. + + **Example request**: + + .. sourcecode:: http + + GET /list_schemas HTTP/1.1 + Host: fakeservices.datajoint.io + + **Example successful response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "schemaNames": [ + "alpha_company" + ] + } + + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain, application/json + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. """ # Get all the schemas try: @@ -129,16 +244,69 @@ def list_schemas(jwt_payload: dict): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/list_tables", methods=['POST']) @protected_route -def list_tables(jwt_payload: dict): +def list_tables(jwt_payload: dict) -> dict: """ - API route for listing all tables under a given schema name. Expects: - (html:GET:Authorization): Must include in format of: bearer - (html:POST:JSON): {"schemaName": } - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. + Handler for ``/list_tables`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. :type jwt_payload: dict - :return: If successful then sends back a list of tables names otherwise returns error + :return: If successful then sends back a list of tables names otherwise returns error. :rtype: dict + + .. http:post:: /list_tables + + Route to get tables within a schema. + + **Example request**: + + .. sourcecode:: http + + POST /list_tables HTTP/1.1 + Host: fakeservices.datajoint.io + Accept: application/json + + { + "schemaName": "alpha_company" + } + + **Example successful response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "tableTypeAndNames": { + "computed_tables": [], + "imported_tables": [], + "lookup_tables": [ + "Employee" + ], + "manual_tables": [ + "Computer" + ], + "part_tables": [] + } + } + + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain, application/json + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. """ try: tables_dict_list = DJConnector.list_tables(jwt_payload, request.json["schemaName"]) @@ -149,20 +317,99 @@ def list_tables(jwt_payload: dict): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/fetch_tuples", methods=['POST']) @protected_route -def fetch_tuples(jwt_payload: dict): - """ - Route to fetch all records for a given table. Expects: - (html:GET:Authorization): Must include in format of: bearer - (html:query_params): {"limit": , "page": , "order": , - "restriction": } - (html:POST:JSON): {"schemaName": , "tableName": } - NOTE: Table name must be in CamalCase - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. +def fetch_tuples(jwt_payload: dict) -> dict: + (""" + Handler for ``/fetch_tuple`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. :type jwt_payload: dict - :return: If successful then sends back records as list otherwise returns error + :return: If successful then sends back dict with records and total count from query + otherwise returns error. :rtype: dict - """ + + .. http:post:: /fetch_tuple + + Route to fetch records. + + **Example request**: + + .. 
sourcecode:: http + + POST /fetch_tuples?limit=2&page=1&order=computer_id%20DESC&""" + "restriction=W3siYXR0cmlidXRlTmFtZSI6ICJjb21wdXRlcl9tZW1vcnkiLCAib3BlcmF0aW9uIjogIj49Iiw" + "gInZhbHVlIjogMzJ9XQo=" + """ HTTP/1.1 + Host: fakeservices.datajoint.io + Accept: application/json + + { + "schemaName": "alpha_company", + "tableName": "Computer" + } + + **Example successful response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "total_count": 4, + "tuples": [ + [ + "eee3491a-86d5-4af7-a013-89bde75528bd", + "ABCDEFJHE", + "Dell", + 1611705600, + 2.2, + 32, + 11.5, + "1100.93", + 5, + 1614265209, + 0 + ], + [ + "ddd1491a-86d5-4af7-a013-89bde75528bd", + "ABCDEFJHI", + "Dell", + 1614556800, + 2.8, + 64, + 13.5, + "1200.99", + 2, + 1614564122, + null + ] + ] + } + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :query limit: Limit of how many records per page. Defaults to ``1000``. + :query page: Page requested. Defaults to ``1``. + :query order: Sort order. Defaults to ``KEY ASC``. + :query restriction: Base64-encoded ``AND`` sequence of restrictions. For example, you + could restrict as ``[{"attributeName": "computer_memory">=", "value": 32}]`` with + this param set as ``""" "W3siYXR0cmlidXRlTmFtZSI6ICJjb21wdXRlcl9tZW1vcnkiLCAib3Bl" + """cmF0aW9uIjo``-``gIj49IiwgInZhbHVlIjogMzJ9XQo=``. Defaults to no restriction. + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain, application/json + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. 
+ """) try: table_tuples, total_count = DJConnector.fetch_tuples( jwt_payload=jwt_payload, @@ -180,17 +427,70 @@ def fetch_tuples(jwt_payload: dict): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/get_table_definition", methods=['POST']) @protected_route -def get_table_definition(jwt_payload: dict): +def get_table_definition(jwt_payload: dict) -> str: """ - Route to get table definition. Expects: - (html:GET:Authorization): Must include in format of: bearer - (html:POST:JSON): {"schemaName": , "tableName": } - NOTE: Table name must be in CamalCase - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. + Handler for ``/get_table_definition`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. :type jwt_payload: dict - :return: If successful then sends back definition for table otherwise returns error - :rtype: str + :return: If successful then sends back definition for table otherwise returns error. + :rtype: dict + + .. http:post:: /get_table_definition + + Route to get DataJoint table definition. + + **Example request**: + + .. sourcecode:: http + + POST /get_table_definition HTTP/1.1 + Host: fakeservices.datajoint.io + Accept: application/json + + { + "schemaName": "alpha_company", + "tableName": "Computer" + } + + **Example successful response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/plain + + # Computers that belong to the company + computer_id : uuid # unique id + --- + computer_serial : varchar(9) # manufacturer serial number + computer_brand : enum('HP','Dell') # manufacturer brand + computer_built : date # manufactured date + computer_processor : double # processing power in GHz + computer_memory : int # RAM in GB + computer_weight : float # weight in lbs + computer_cost : decimal(6,2) # purchased price + computer_preowned : tinyint # purchased as new or used + computer_purchased : datetime # purchased date and time + computer_updates=null : time # scheduled daily update timeslot + + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. """ try: table_definition = DJConnector.get_table_definition(jwt_payload, @@ -203,17 +503,139 @@ def get_table_definition(jwt_payload: dict): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/get_table_attributes", methods=['POST']) @protected_route -def get_table_attributes(jwt_payload: dict): +def get_table_attributes(jwt_payload: dict) -> dict: """ - Route to get table attibutes. Expects: - (html:GET:Authorization): Must include in format of: bearer - (html:POST:JSON): {"schemaName": , "tableName": } - NOTE: Table name must be in CamalCase - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. + Handler for ``/get_table_attributes`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. 
:type jwt_payload: dict - :return: If successful then sends back dict of table attributes otherwise returns error + :return: If successful then sends back dict of table attributes otherwise returns error. :rtype: dict + + .. http:post:: /get_table_attributes + + Route to get metadata on table attributes. + + **Example request**: + + .. sourcecode:: http + + POST /get_table_attributes HTTP/1.1 + Host: fakeservices.datajoint.io + Accept: application/json + + { + "schemaName": "alpha_company", + "tableName": "Computer" + } + + **Example successful response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "primary_attributes": [ + [ + "computer_id", + "uuid", + false, + null, + false + ] + ], + "secondary_attributes": [ + [ + "computer_serial", + "varchar(9)", + false, + null, + false + ], + [ + "computer_brand", + "enum('HP','Dell')", + false, + null, + false + ], + [ + "computer_built", + "date", + false, + null, + false + ], + [ + "computer_processor", + "double", + false, + null, + false + ], + [ + "computer_memory", + "int", + false, + null, + false + ], + [ + "computer_weight", + "float", + false, + null, + false + ], + [ + "computer_cost", + "decimal(6,2)", + false, + null, + false + ], + [ + "computer_preowned", + "tinyint", + false, + null, + false + ], + [ + "computer_purchased", + "datetime", + false, + null, + false + ], + [ + "computer_updates", + "time", + true, + "null", + false + ] + ] + } + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain, application/json + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. 
""" try: return DJConnector.get_table_attributes(jwt_payload, @@ -225,18 +647,71 @@ def get_table_attributes(jwt_payload: dict): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/insert_tuple", methods=['POST']) @protected_route -def insert_tuple(jwt_payload: dict): +def insert_tuple(jwt_payload: dict) -> str: """ - Route to insert record. Expects: - (html:GET:Authorization): Must include in format of: bearer - (html:POST:JSON): {"schemaName": , "tableName": , - "tuple": } - NOTE: Table name must be in CamalCase - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. + Handler for ``/insert_tuple`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. :type jwt_payload: dict - :return: If successful then returns "Insert Successful" otherwise returns error + :return: If successful then returns ``Insert Successful`` otherwise returns error. :rtype: dict + + .. http:post:: /insert_tuple + + Route to insert a record. + + **Example request**: + + .. sourcecode:: http + + POST /insert_tuple HTTP/1.1 + Host: fakeservices.datajoint.io + Accept: application/json + + { + "schemaName": "alpha_company", + "tableName": "Computer", + "tuple": { + "computer_id": "ffffffff-86d5-4af7-a013-89bde75528bd", + "computer_serial": "ZYXWVEISJ", + "computer_brand": "HP", + "computer_built": "2021-01-01", + "computer_processor": 2.7, + "computer_memory": 32, + "computer_weight": 3.7, + "computer_cost": 599.99, + "computer_preowned": 0, + "computer_purchased": "2021-02-01 13:00:00", + "computer_updates": 0 + } + } + + + **Example successful response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/plain + + Insert Successful + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. 
+ + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. """ try: # Attempt to insert @@ -252,18 +727,76 @@ def insert_tuple(jwt_payload: dict): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/record/dependency", methods=['GET']) @protected_route def record_dependency(jwt_payload: dict) -> dict: - """ - Route to insert record. Expects: - (html:GET:Authorization): Must include in format of: bearer - (html:query_params): {"schemaName": , "tableName": , - "restriction": } - NOTE: Table name must be in CamalCase - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. + (""" + Handler for ``/record/dependency`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. :type jwt_payload: dict - :return: If sucessfuly sends back a list of dependencies otherwise returns error + :return: If sucessfuly sends back a list of dependencies otherwise returns error. :rtype: dict - """ + + .. http:get:: /record/dependency + + Route to get the metadata in relation to the dependent records associated with a """ + """restricted subset of a table. + + **Example request**: + + .. sourcecode:: http + + GET /fetch_tuples?schemaName=alpha_company&tableName=Computer&""" + "restriction=W3siYXR0cmlidXRlTmFtZSI6ICJjb21wdXRlcl9tZW1vcnkiLCAib3BlcmF0aW9uIjogIj49Iiw" + "gInZhbHVlIjogMzJ9XQo=" + """ HTTP/1.1 + Host: fakeservices.datajoint.io + + **Example successful response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: application/json + + { + "dependencies": [ + { + "accessible": true, + "count": 7, + "schema": "alpha_company", + "table": "computer" + }, + { + "accessible": true, + "count": 2, + "schema": "alpha_company", + "table": "#employee" + } + ] + } + + **Example unexpected response**: + + .. 
sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :query schemaName: Schema name. + :query tableName: Table name. + :query restriction: Base64-encoded ``AND`` sequence of restrictions. For example, you + could restrict as ``[{"attributeName": "computer_memory">=", "value": 32}]`` with + this param set as ``""" "W3siYXR0cmlidXRlTmFtZSI6ICJjb21wdXRlcl9tZW1vcnkiLCAib3Bl" + """cmF0aW9uIjo``-``gIj49IiwgInZhbHVlIjogMzJ9XQo=``. + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain, application/json + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. + """) # Get dependencies try: dependencies = DJConnector.record_dependency( @@ -276,18 +809,71 @@ def record_dependency(jwt_payload: dict) -> dict: @app.route(f"{environ.get('PHARUS_PREFIX', '')}/update_tuple", methods=['POST']) @protected_route -def update_tuple(jwt_payload: dict): +def update_tuple(jwt_payload: dict) -> str: """ - Route to update record. Expects: - (html:GET:Authorization): Must include in format of: bearer - (html:POST:JSON): {"schemaName": , "tableName": , - "tuple": } - NOTE: Table name must be in CamalCase - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. + Handler for ``/update_tuple`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. :type jwt_payload: dict - :return: If successful then returns "Update Successful" otherwise returns error + :return: If successful then returns ``Update Successful`` otherwise returns error. :rtype: dict + + .. http:post:: /update_tuple + + Route to update a record. + + **Example request**: + + .. 
sourcecode:: http + + POST /update_tuple HTTP/1.1 + Host: fakeservices.datajoint.io + Accept: application/json + + { + "schemaName": "alpha_company", + "tableName": "Computer", + "tuple": { + "computer_id": "ffffffff-86d5-4af7-a013-89bde75528bd", + "computer_serial": "ZYXWVEISJ", + "computer_brand": "HP", + "computer_built": "2021-01-01", + "computer_processor": 2.7, + "computer_memory": 32, + "computer_weight": 3.7, + "computer_cost": 399.99, + "computer_preowned": 0, + "computer_purchased": "2021-02-01 13:00:00", + "computer_updates": 0 + } + } + + + **Example successful response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/plain + + Update Successful + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain + :statuscode 200: No error. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. """ try: # Attempt to insert @@ -302,18 +888,82 @@ def update_tuple(jwt_payload: dict): @app.route(f"{environ.get('PHARUS_PREFIX', '')}/delete_tuple", methods=['POST']) @protected_route -def delete_tuple(jwt_payload: dict): +def delete_tuple(jwt_payload: dict) -> dict: """ - Route to delete a specific record. Expects: - (html:GET:Authorization): Must include in format of: bearer - (html:POST:JSON): {"schemaName": , "tableName": , - "restrictionTuple": } - NOTE: Table name must be in CamalCase - :param jwt_payload: Dictionary containing databaseAddress, username and password - strings. + Handler for ``/delete_tuple`` route. + + :param jwt_payload: Dictionary containing databaseAddress, username, and password strings. 
:type jwt_payload: dict - :return: If successful then returns "Delete Successful" otherwise returns error + :return: If successful returns ``Delete Successful`` otherwise returns error. :rtype: dict + + .. http:post:: /delete_tuple + + Route to delete a specific record. + + **Example request**: + + .. sourcecode:: http + + POST /delete_tuple HTTP/1.1 + Host: fakeservices.datajoint.io + Accept: application/json + + { + "schemaName": "alpha_company", + "tableName": "Computer", + "restrictionTuple": { + "computer_id": "4e41491a-86d5-4af7-a013-89bde75528bd" + } + } + + **Example successful response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/plain + + Delete Successful + + **Example conflict response**: + + .. sourcecode:: http + + HTTP/1.1 409 Conflict + Vary: Accept + Content-Type: application/json + + { + "error": "IntegrityError", + "error_msg": "Cannot delete or update a parent row: a foreign key constraint + fails (`alpha_company`.`#employee`, CONSTRAINT `#employee_ibfk_1` FOREIGN + KEY (`computer_id`) REFERENCES `computer` (`computer_id`) ON DELETE + RESTRICT ON UPDATE CASCADE", + "child_schema": "alpha_company", + "child_table": "Employee" + } + + **Example unexpected response**: + + .. sourcecode:: http + + HTTP/1.1 500 Internal Server Error + Vary: Accept + Content-Type: text/plain + + 400 Bad Request: The browser (or proxy) sent a request that this server could not + understand. + + :query cascade: Enable cascading delete. Accepts ``true`` or ``false``. + Defaults to ``false``. + :reqheader Authorization: Bearer + :resheader Content-Type: text/plain, application/json + :statuscode 200: No error. + :statuscode 409: Attempting to delete a record with dependents while ``cascade`` set + to ``false``. + :statuscode 500: Unexpected error encountered. Returns the error message as a string. 
""" try: # Attempt to delete tuple diff --git a/pharus/version.py b/pharus/version.py index 0f55d8b..c36d94e 100644 --- a/pharus/version.py +++ b/pharus/version.py @@ -1,2 +1,2 @@ """Package metadata.""" -__version__ = '0.1.0b0' +__version__ = '0.1.0b1' diff --git a/requirements_docs.txt b/requirements_docs.txt new file mode 100644 index 0000000..239acc4 --- /dev/null +++ b/requirements_docs.txt @@ -0,0 +1,5 @@ +. +pytest +sphinx +sphinx-rtd-theme +sphinxcontrib-httpdomain diff --git a/setup.py b/setup.py index 8705cc8..633f64f 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ pkg_name = 'pharus' here = path.abspath(path.dirname(__file__)) -with open(path.join(here, 'README.md'), 'r') as f: +with open(path.join(here, 'README.rst'), 'r') as f: long_description = f.read() with open(path.join(here, pkg_name, 'version.py')) as f: @@ -24,7 +24,7 @@ author_email='support@vathes.com', description='A generic REST API server backend for DataJoint pipelines.', long_description=long_description, - long_description_content_type='text/markdown', + long_description_content_type='text/x-rst', url='https://github.com/datajoint/pharus', packages=find_packages(exclude=['test*', 'docs']), classifiers=[ diff --git a/under_contruction.png b/under_contruction.png deleted file mode 100644 index 7f7ff11..0000000 Binary files a/under_contruction.png and /dev/null differ