Skip to content

Commit e6afeda

Browse files
committed
ENH: Add publishing to PyPI to continuous integration
Adds a deploy step for all 3 ci-servers used by pyradiomics (supporting Linux, Windows and Mac OSX). The deployment step is only run for tagged releases, i.e. when a build is triggered for a tag. Furthermore, this tag is checked against a regular expression, to restrict deployment only to releases (e.g. v1.0, v2.1.3) and/or release candidates (e.g. v1.0-rc1, v2.1.3-rc12). Authentication to the PyPI server is done using (private) environment variables set in the accounts on the various CI-servers. These variables will be only available to builds on branches/tags in the repository that is linked to the CI-account (i.e. not available for forked builds). Finally, the current deployment step publishes to the test server, rather than the live server. This is done for testing of the workflow. Prior to integration into the master this will be rectified.
1 parent 69933c7 commit e6afeda

File tree

6 files changed

+124
-12
lines changed

6 files changed

+124
-12
lines changed

.circleci/config.yml

Lines changed: 83 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,16 @@ jobs:
1010
- image: jupyter/datascience-notebook
1111
user: root
1212
steps:
13-
- checkout
1413
- run:
1514
# CircleCI says we need SSH and the docker doesn't have it installed
1615
name: Update
1716
command: |
1817
apt-get update
1918
apt-get install -y openssh-client openssh-server
19+
- run:
20+
name: Avoid hosts unknown for github
21+
command: echo -e "Host github.com\n\tStrictHostKeyChecking no\n" > /etc/ssh/ssh_config
22+
- checkout
2023
- run:
2124
# Jupyter datascience notebook does not support python 2 anymore, install it manually.
2225
# See also https://github.com/jupyter/docker-stacks/issues/432
@@ -55,3 +58,82 @@ jobs:
5558
jupyter nbconvert --ExecutePreprocessor.kernel_name=python2 --ExecutePreprocessor.timeout=-1 --to notebook --output-dir /tmp --execute notebooks/helloRadiomics.ipynb notebooks/helloFeatureClass.ipynb notebooks/PyRadiomicsExample.ipynb
5659
5760
jupyter nbconvert --ExecutePreprocessor.kernel_name=python3 --ExecutePreprocessor.timeout=-1 --to notebook --output-dir /tmp --execute notebooks/helloRadiomics.ipynb notebooks/helloFeatureClass.ipynb notebooks/PyRadiomicsExample.ipynb
61+
deploy:
62+
working_directory: /pyradiomics
63+
docker:
64+
- image: jupyter/datascience-notebook
65+
user: root
66+
steps:
67+
- run:
68+
# CircleCI says we need SSH and the docker doesn't have it installed
69+
name: Update
70+
command: |
71+
apt-get update
72+
apt-get install -y openssh-client openssh-server
73+
- run:
74+
name: Avoid hosts unknown for github
75+
command: echo -e "Host github.com\n\tStrictHostKeyChecking no\n" > /etc/ssh/ssh_config
76+
- checkout
77+
- run:
78+
# Jupyter datascience notebook does not support python 2 anymore, install it manually.
79+
# See also https://github.com/jupyter/docker-stacks/issues/432
80+
# Next, install python 2 kernel globally, so it can be found from the root
81+
name: Install Python 2 Kernel
82+
command: |
83+
conda create -n python2 python=2 ipykernel
84+
pip install kernda --no-cache
85+
$CONDA_DIR/envs/python2/bin/python -m ipykernel install
86+
kernda -o -y /usr/local/share/jupyter/kernels/python2/kernel.json
87+
pip uninstall kernda -y
88+
- run:
89+
name: Install pyradiomics in Python 2 and 3
90+
command: |
91+
source activate python2
92+
python -m pip install --no-cache-dir -r requirements.txt
93+
python -m pip install --no-cache-dir -r requirements-dev.txt
94+
python setup.py install
95+
source activate root
96+
python -m pip install --no-cache-dir -r requirements.txt
97+
python -m pip install --no-cache-dir -r requirements-dev.txt
98+
python setup.py install
99+
- run:
100+
name: install twine, auditwheel
101+
command: |
102+
python -m pip install auditwheel
103+
python -m pip install twine
104+
- run:
105+
name: create source and wheel distribution
106+
command: |
107+
source activate python2
108+
python setup.py bdist_wheel
109+
source activate root
110+
python setup.py sdist bdist_wheel
111+
# Since there are no external shared libraries to bundle into the wheels
112+
# this step will fixup the wheel switching from 'linux' to 'manylinux1' tag
113+
for whl in dist/*$(uname -p).whl; do
114+
auditwheel repair $whl -w ./dist/
115+
rm $whl
116+
done
117+
- run:
118+
name: deploy source and linux wheels
119+
command: twine upload --repository-url https://test.pypi.org/legacy/ ./dist/*.whl ./dist/*.tar.gz -u $PYPI_USER -p $PYPI_PASSWORD
120+
121+
workflows:
122+
version: 2
123+
build_and_deploy:
124+
jobs:
125+
- build:
126+
filters:
127+
tags:
128+
only:
129+
- /^v?[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/
130+
- deploy:
131+
requires:
132+
- build
133+
filters:
134+
branches:
135+
ignore:
136+
- /.*/
137+
tags:
138+
only:
139+
- /^v?[0-9]+(\.[0-9]+)*(-rc[0-9]+)?/

.travis.yml

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
# Config file for automatic testing at travis-ci.org
22

3-
branches:
4-
only:
5-
- master
6-
73
language: python
84

95
matrix:
@@ -44,3 +40,13 @@ script:
4440

4541
after_success:
4642
- ci after_test
43+
44+
before_deploy:
45+
- sudo pip install twine # Twine installation requires sudo to get access to /usr/local/man
46+
deploy:
47+
provider: script
48+
skip_cleanup: true
49+
script: twine upload --repository-url https://test.pypi.org/legacy/ dist/*.whl -u $PYPI_USER -p $PYPI_PASSWORD
50+
on:
51+
tags: true
52+
condition: $TRAVIS_TAG =~ ^v?[0-9]+(\.[0-9]+)*(-rc[0-9]+)?$

MANIFEST.in

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
include CONTRIBUTING.md
1+
include CONTRIBUTING.rst
22
include LICENSE.txt
33
include README.md
44
include requirements.txt
@@ -8,10 +8,20 @@ include versioneer.py
88

99
recursive-include radiomics *
1010

11-
recursive-include data *
12-
exclude data/PyradiomicsFeatures.csv data/Baseline2PyradiomicsFeaturesDiff.csv
11+
recursive-include data/baseline *
12+
recursive-include data *_image.nrrd
13+
recursive-include data *_label.nrrd
14+
include data/README.md
15+
16+
recursive-include examples/exampleSettings *.yaml
17+
recursive-include examples batch*.py
18+
recursive-include examples hello*.py
19+
include examples/testCases.csv
1320

1421
recursive-include tests *
1522

23+
recursive-include bin *.py
24+
1625
recursive-exclude * __pycache__
1726
recursive-exclude * *.py[cod]
27+
recursive-exclude * nosetests.xml

appveyor.yml

Lines changed: 12 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,3 @@
1-
branches:
2-
only:
3-
- master
4-
51
version: "0.0.1.{build}"
62

73
environment:
@@ -27,6 +23,7 @@ environment:
2723
init:
2824
- python -m pip install scikit-ci==0.13.0 scikit-ci-addons==0.11.0
2925
- python -m ci_addons --install ../addons
26+
- python -m pip install twine
3027

3128
- ps: ../addons/appveyor/rolling-build.ps1
3229

@@ -42,8 +39,19 @@ test_script:
4239
after_test:
4340
- python -m ci after_test
4441

42+
artifacts:
43+
- path: dist/*
44+
name: pypiartefacts
45+
4546
on_finish:
4647
- ps: ../addons/appveyor/enable-worker-remote-access.ps1 -check_for_block
4748

49+
deploy_script:
50+
- echo "checking deployment"
51+
- ps: if ($env:APPVEYOR_REPO_TAG_NAME -notmatch '^v?\d(\.\d)*(-rc\d+)?$') { appveyor exit }
52+
- echo "starting deployment"
53+
- twine upload --repository-url https://test.pypi.org/legacy/ dist/*.whl -u %PYPI_USER% -p %PYPI_PASSWORD%
54+
- echo "finished deployment"
55+
4856
matrix:
4957
fast_finish: false

setup.cfg

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,6 @@
1+
[metadata]
2+
description-file = README.md
3+
14
[nosetests]
25
verbosity=3
36
where=tests

setup.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,9 @@
2525
with open('requirements-setup.txt', 'r') as fp:
2626
setup_requirements = list(filter(bool, (line.strip() for line in fp)))
2727

28+
with open('README.md', 'r') as fp:
29+
long_description = fp.read()
30+
2831

2932
class NoseTestCommand(TestCommand):
3033
"""Command to run unit tests using nose driver after in-place build"""

0 commit comments

Comments
 (0)