Commit 951b787

Merge branch 'vorel99-add-pyproject-toml' into develop
chore: merge pyproject.toml changes
2 parents b13b0c8 + 8d8de62

13 files changed: +148, -155 lines

.github/workflows/pull-request.yml (+2, -7)

@@ -53,9 +53,7 @@ jobs:
       - name: Install pip dependencies
         run: |
           python -m pip install --upgrade pip
-          python -m pip install -r requirements.txt
-          python -m pip install -r requirements-dev.txt
-          python -m pip install -r requirements-test.txt
+          python -m pip install ".[dev,test]"

       - name: Install the package
         run: make install

@@ -104,10 +102,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          python -m pip install -r requirements.txt
-          python -m pip install -r requirements-dev.txt
-          python -m pip install -r requirements-test.txt
-          python -m pip install -r requirements-docs.txt
+          python -m pip install ".[dev,test,docs]"

       - name: Install the package
         run: make install
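
For reference, the consolidated install can be reproduced outside CI; a minimal sketch, assuming a checkout at the repository root (quoting the extras keeps shells such as zsh from globbing the brackets):

``` console
# Mirrors the updated workflow steps: one extras install replaces the per-file requirements installs.
python -m pip install --upgrade pip
python -m pip install ".[dev,test]"        # lint/test job
python -m pip install ".[dev,test,docs]"   # docs job
```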

.github/workflows/release-deprecated.yml (+1, -3)

@@ -34,9 +34,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          python -m pip install -r requirements.txt
-          python -m pip install -r requirements-dev.txt
-          python -m pip install -r requirements-test.txt
+          python -m pip install ".[dev,test]"

       - name: Install
         run: make install

.github/workflows/release.yml (+1, -3)

@@ -35,9 +35,7 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          python -m pip install -r requirements.txt
-          python -m pip install -r requirements-dev.txt
-          python -m pip install -r requirements-test.txt
+          python -m pip install ".[dev,test]"

       - name: Install
         run: make install

.github/workflows/tests.yml (+5, -9)

@@ -53,8 +53,7 @@ jobs:
             ${{ runner.os }}-${{ matrix.pandas }}-pip-
       - run: |
           pip install --upgrade pip setuptools wheel
-          pip install -r requirements.txt "${{ matrix.pandas }}" "${{ matrix.numpy }}"
-          pip install -r requirements-test.txt
+          pip install ".[test]" "${{ matrix.pandas }}" "${{ matrix.numpy }}"
       - run: echo "YDATA_PROFILING_NO_ANALYTICS=False" >> $GITHUB_ENV
       - run: make install

@@ -102,8 +101,7 @@ jobs:
             ${{ runner.os }}-${{ matrix.pandas }}-pip-
       - run: |
           pip install --upgrade pip setuptools wheel
-          pip install -r requirements.txt "${{ matrix.pandas }}" "${{ matrix.numpy }}"
-          pip install -r requirements-test.txt
+          pip install ".[test]" "${{ matrix.pandas }}" "${{ matrix.numpy }}"
           echo "YDATA_PROFILING_NO_ANALYTICS=False" >> $GITHUB_ENV
       - run: make install

@@ -118,8 +116,7 @@ jobs:
             ${{ runner.os }}-${{ matrix.pandas }}-pip-
       - run: |
           pip install --upgrade pip setuptools wheel
-          pip install -r requirements.txt "${{ matrix.pandas }}" "${{ matrix.numpy }}"
-          pip install -r requirements-test.txt
+          pip install ".[test]" "${{ matrix.pandas }}" "${{ matrix.numpy }}"
       - run: make install
       - run: make test_cov
       - run: codecov -F py${{ matrix.python-version }}-${{ matrix.os }}-${{ matrix.pandas }}-${{ matrix.numpy }}

@@ -176,14 +173,13 @@ jobs:
       - run: |
           pip install --upgrade pip setuptools wheel
           pip install pytest-spark>=0.6.0 pyarrow==1.0.1 pyspark=="${{ matrix.spark }}"
-          pip install -r requirements.txt
-          pip install -r requirements-test.txt
+          pip install ".[test]"
           pip install "${{ matrix.pandas }}" "${{ matrix.numpy }}"
       - if: ${{ matrix.spark != '3.0.1' }}
         run: echo "ARROW_PRE_0_15_IPC_FORMAT=1" >> $GITHUB_ENV
       - run: echo "SPARK_LOCAL_IP=127.0.0.1" >> $GITHUB_ENV
       - run: make install
       - run: make install-spark-ci
-      - run: pip install -r requirements-spark.txt # Make sure the proper version of pandas is install after everything
+      - run: pip install ".[spark]" # Make sure the proper version of pandas is install after everything
       - run: make test_spark
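
In the test matrix the extras install is followed by explicit pandas/numpy pins taken from ${{ matrix.pandas }} and ${{ matrix.numpy }}. A local equivalent is sketched below; the pinned versions are placeholders, not values taken from the workflow:

``` console
# Placeholder pins stand in for the matrix-provided pandas/numpy specifiers.
pip install --upgrade pip setuptools wheel
pip install ".[test]" "pandas==1.5.3" "numpy==1.23.5"
make install
make test_cov
```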

Makefile (+2, -2)

@@ -29,10 +29,10 @@ package:
 	twine check dist/*

 install:
-	pip install -e .[notebook]
+	pip install -e ".[notebook]"

 install-docs: install ### Installs regular and docs dependencies
-	pip install -r requirements-docs.txt
+	pip install -e ".[docs]"

 install-spark-ci:
 	sudo apt-get update
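
With these targets updated, both install paths resolve their dependencies from pyproject.toml extras, and the added quotes avoid bracket globbing in shells such as zsh:

``` console
make install        # pip install -e ".[notebook]"
make install-docs   # runs install, then pip install -e ".[docs]"
```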

docs/support-contribution/contribution_guidelines.md (+1, -1)

@@ -33,7 +33,7 @@ To activate the local mechanisms (created using pre-commit hooks), run the
 following commands:

 ``` console
-pip install -r requirements-dev.txt
+pip install ".[dev]"
 pre-commit install --hook-type commit-msg --hook-type pre-commit
 ```
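
After installing the dev extra, the hooks can also be exercised once across the whole tree; pre-commit run --all-files is standard pre-commit usage, though it is not part of the guidelines excerpt above:

``` console
pip install ".[dev]"
pre-commit install --hook-type commit-msg --hook-type pre-commit
pre-commit run --all-files   # optional: run every configured hook against all files once
```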

pyproject.toml (new file, +135)

@@ -0,0 +1,135 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = "ydata-profiling"
authors = [
    {name = "YData Labs Inc", email = "[email protected]"},
]
description = "Generate profile report for pandas DataFrame"
readme = "README.md"
requires-python = ">=3.7, <3.13"
keywords = ["pandas", "data-science", "data-analysis", "python", "jupyter", "ipython"]
license = {text = "MIT"}
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Topic :: Software Development :: Build Tools",
    "License :: OSI Approved :: MIT License",
    "Environment :: Console",
    "Operating System :: OS Independent",
    "Intended Audience :: Science/Research",
    "Intended Audience :: Developers",
    "Intended Audience :: Financial and Insurance Industry",
    "Intended Audience :: Healthcare Industry",
    "Topic :: Scientific/Engineering",
    "Framework :: IPython",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
]

dependencies = [
    "scipy>=1.4.1, <1.14",
    "pandas>1.1, <3.0, !=1.4.0",
    "matplotlib>=3.5, <=3.10",
    "pydantic>=2",
    "PyYAML>=5.0.0, <6.1",
    "jinja2>=2.11.1, <3.2",
    "visions[type_image_path]>=0.7.5, <0.7.7",
    "numpy>=1.16.0,<2.2",
    # Could be optional
    # Related to HTML report
    "htmlmin==0.1.12",
    # Correlations
    "phik>=0.11.1,<0.13",
    # Examples
    "requests>=2.24.0, <3",
    # Progress bar
    "tqdm>=4.48.2, <5",
    "seaborn>=0.10.1, <0.14",
    "multimethod>=1.4, <2",
    # metrics
    "statsmodels>=0.13.2, <1",
    # type checking
    "typeguard>=3, <5",
    "imagehash==4.3.1",
    "wordcloud>=1.9.3",
    "dacite>=1.8",
    "numba>=0.56.0, <1",
]

dynamic = ["version"]

[project.optional-dependencies]
# dependencies for development and testing
dev = [
    "black>=20.8b1",
    "isort>=5.0.7",
    "pre-commit>=2.8.2",
    "virtualenv>=20.0.33",
    "twine",
    "wheel",
    "myst-parser>=0.18.1",
    "sphinx_rtd_theme>=0.4.3",
    "sphinx-autodoc-typehints>=1.10.3",
    "sphinx-multiversion>=0.2.3",
    "autodoc_pydantic",
]
# this provides the recommended pyspark and pyarrow versions for spark to work on pandas-profiling
# note that if you are using pyspark 2.3 or 2.4 and pyarrow >= 0.15, you might need to
# set ARROW_PRE_0_15_IPC_FORMAT=1 in your conf/spark-env.sh for toPandas functions to work properly
spark = [
    "pyspark>=2.3.0",
    "pyarrow>=2.0.0",
    "pandas>1.1, <2, !=1.4.0",
    "numpy>=1.16.0,<1.24",
    "visions[type_image_path]==0.7.5",
]
test = [
    "pytest",
    "coverage>=6.5, <8",
    "codecov",
    "pytest-cov",
    "pytest-spark",
    "nbval",
    "pyarrow",
    "twine>=3.1.1",
    "kaggle",
]
notebook = [
    "jupyter>=1.0.0",
    "ipywidgets>=7.5.1",
]
docs = [
    "mkdocs>=1.6.0,<1.7.0",
    "mkdocs-material>=9.0.12,<10.0.0",
    "mkdocs-material-extensions>=1.1.1,<2.0.0",
    "mkdocs-table-reader-plugin<=2.2.0",
    "mike>=2.1.1,<2.2.0",
    "mkdocstrings[python]>=0.20.0,<1.0.0",
    "mkdocs-badges",
]
unicode = [
    "tangled-up-in-unicode==0.2.0",
]

[tool.setuptools.packages.find]
where = ["src"]

[tool.setuptools.package-data]
ydata_profiling = ["py.typed"]

[tool.setuptools]
include-package-data = true

[project.scripts]
ydata_profiling = "ydata_profiling.controller.console:main"
pandas_profiling = "ydata_profiling.controller.console:main"

[project.urls]
homepage = "https://github.com/ydataai/ydata-profiling"
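
A quick way to sanity-check the new packaging metadata is to build and inspect the distribution; this is a sketch assuming the PyPA build tool is installed alongside twine (which already appears in the dev and test extras), run from the repository root:

``` console
python -m pip install build twine
python -m build                 # builds sdist and wheel from pyproject.toml
twine check dist/*              # same check the Makefile's package target runs
pip install -e ".[notebook]"    # editable install with an extra, as in make install
```

Released versions expose the same extras by name, e.g. pip install "ydata-profiling[notebook]".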

Deleted files:

requirements-dev.txt (-11 lines)
requirements-docs.txt (-7 lines)
requirements-spark.txt (-9 lines)
requirements-test.txt (-10 lines)
requirements.txt (-35 lines)
