@@ -19,21 +19,31 @@ COPY pyproject.toml uv.lock* ./
 
 RUN uv sync --no-dev --no-cache
 
-# Run the Guardrails configure command to create a .guardrailsrc file
-# Only configure if GUARDRAILS_API_KEY is provided
-RUN uv run guardrails configure --enable-metrics --enable-remote-inferencing --token "$GUARDRAILS_API_KEY"
-
-# Install required guardrails validators
-RUN uv run guardrails hub install hub://guardrails/detect_jailbreak --no-install-local-models
-RUN uv run guardrails hub install hub://guardrails/detect_pii --no-install-local-models
-RUN uv run guardrails hub install hub://guardrails/toxic_language>=0.0.2 --no-install-local-models
-
-# The ToxicLanguage Validator uses the punkt tokenizer, so we need to download that to a known directory
-# Set the directory for nltk data
-ENV NLTK_DATA=/opt/nltk_data
-
-# Download punkt data
-RUN uv run python -m nltk.downloader -d /opt/nltk_data punkt_tab
+# Guardrails disabled for now to speed up builds
+# # Run the Guardrails configure command to create a .guardrailsrc file
+# # Only configure if GUARDRAILS_API_KEY is provided
+# RUN if [ -n "$GUARDRAILS_API_KEY" ]; then \
+# uv run guardrails configure --enable-metrics --enable-remote-inferencing --token "$GUARDRAILS_API_KEY"; \
+# else \
+# echo "Warning: GUARDRAILS_API_KEY not provided, skipping guardrails configuration"; \
+# fi
+
+# # Install required guardrails validators
+# # Note: Removing --no-install-local-models to ensure Python packages are installed
+# RUN uv run guardrails hub install hub://guardrails/detect_jailbreak
+# RUN uv run guardrails hub install hub://guardrails/detect_pii
+# RUN uv run guardrails hub install hub://guardrails/toxic_language>=0.0.2
+
+# # Verify packages are accessible (this will fail the build if they're not found)
+# RUN uv run python -c "from guardrails.hub import DetectJailbreak, DetectPII, ToxicLanguage; print('Guardrails validators imported successfully')"
+
+# NLTK disabled - only needed for guardrails ToxicLanguage validator
+# # The ToxicLanguage Validator uses the punkt tokenizer, so we need to download that to a known directory
+# # Set the directory for nltk data
+# ENV NLTK_DATA=/opt/nltk_data
+#
+# # Download punkt data
+# RUN uv run python -m nltk.downloader -d /opt/nltk_data punkt_tab
 
 COPY . .
 