-
Notifications
You must be signed in to change notification settings - Fork 11
Expand file tree
/
Copy path pyproject.toml
More file actions
113 lines (105 loc) · 2.96 KB
/
pyproject.toml
File metadata and controls
113 lines (105 loc) · 2.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
# PEP 517 build configuration: hatchling is the build backend.
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

# Core project metadata (PEP 621).
[project]
name = "llama-stack-provider-ragas"
version = "0.6.1"
description = "Ragas evaluation as an out-of-tree Llama Stack provider"
readme = "README.md"
requires-python = ">=3.12"
license = "Apache-2.0"
# NOTE(review): spelled "LICENCE" here — confirm this matches the actual
# filename in the repository root (commonly "LICENSE").
license-files = ["LICENCE"]
classifiers = [
    "Intended Audience :: Developers",
    "Intended Audience :: Information Technology",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
]
authors = [
    { name = "Diego Maniloff", email = "dmanilof@redhat.com" },
    { name = "Trusty AI Team" },
]
keywords = ["llama-stack", "ragas", "evaluation"]
# PEP 508 requirement strings, one per line, sorted alphabetically.
dependencies = [
    "datasets>=2.16.0",
    "greenlet==3.2.4",  # inline/files/localfs errors saying greenlet not found
    "llama-stack>=0.5.0",
    "llama-stack-api>=0.5.0",
    "llama-stack-client>=0.5.0",
    "pandas<2.4.0",
    "pyarrow>=21.0.0",
    "ragas==0.3.0",
    "requests>=2.32.5",
    "setuptools-scm",
]

[project.urls]
homepage = "https://github.com/trustyai-explainability/llama-stack-provider-ragas"
repository = "https://github.com/trustyai-explainability/llama-stack-provider-ragas"

[project.optional-dependencies]
# Extras for remote execution via Kubeflow Pipelines (kfp) on Kubernetes.
remote = [
    "kfp>=2.5.0",
    "kfp-kubernetes>=2.0.0",
    "kfp-pipeline-spec>=2.0.0",
    "kfp-server-api>=2.0.0",
    "kubernetes>=30.0.0",
    "s3fs>=2024.12.0",
]
# Runtime extras for serving a Llama Stack distribution locally
# (telemetry, sqlite persistence, ollama, ASGI server).
distro = [
    "aiosqlite",
    "ollama",
    "opentelemetry-api",
    "opentelemetry-exporter-otlp",
    "uvicorn",
]
# Development tooling; pulls in both extras above via self-reference.
dev = [
    "llama-stack-provider-ragas[distro]",
    "llama-stack-provider-ragas[remote]",
    "ipykernel",
    "mypy",
    "pre-commit",
    "pytest",
    "pytest-asyncio",
    "pytest-cov",
    "rich",
    "ruff",
]

[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = "test_*.py"
# src-layout: make the package importable without installing it.
pythonpath = ["src"]
addopts = "-v"
log_cli = true
log_cli_level = "INFO"
markers = [
    "unit: Unit tests for wrapper classes (mocked client by default)",
    "lls_integration: Llama Stack integration tests",
    "e2e: End-to-end tests against a deployed Llama Stack distribution on OpenShift",
]

[tool.ruff]
target-version = "py312"

[tool.ruff.lint]
select = [
    "B",   # flake8-bugbear
    "C4",  # flake8-comprehensions
    "E",   # pycodestyle errors
    "F",   # pyflakes
    "I",   # isort
    "UP",  # pyupgrade
    "W",   # pycodestyle warnings
]
ignore = [
    "B008",  # do not perform function calls in argument defaults
    "C901",  # too complex
    "E501",  # line too long, handled by the formatter
]

[tool.mypy]
python_version = "3.12"
warn_return_any = true
warn_unused_configs = true
# Untyped defs are still allowed; untyped bodies are nevertheless checked.
disallow_untyped_defs = false
disallow_incomplete_defs = false
check_untyped_defs = true
disallow_untyped_decorators = false
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_no_return = true
warn_unreachable = true
strict_equality = true
show_error_codes = true
# Third-party deps (ragas, kfp, ...) may lack type stubs.
ignore_missing_imports = true
exclude = [
    ".mypy_cache",
    ".venv",
    "build",
    "dist",
]