Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ fsspec==2026.2.0
# torch
ftfy==6.3.1
# via garak
garak==0.14.1+rhaiv.5
garak==0.14.1+rhaiv.6
# via llama-stack-provider-trustyai-garak (pyproject.toml)
google-api-core==2.30.0
# via
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@
parse_digest_from_report_content,
parse_generations_from_report_content,
)
from ..utils import get_scan_base_dir
from ..utils import get_scan_base_dir, as_bool
from ..constants import (
DEFAULT_TIMEOUT,
DEFAULT_MODEL_TYPE,
Expand Down Expand Up @@ -550,12 +550,14 @@ def _run_via_kfp(
if model_auth_secret:
pipeline_args["model_auth_secret_name"] = model_auth_secret

disable_cache = as_bool(ip.get("disable_cache", False))
run = kfp_client.create_run_from_pipeline_func(
evalhub_garak_pipeline,
arguments=pipeline_args,
run_name=f"evalhub-garak-{config.id}",
namespace=kfp_config.namespace,
experiment_name=kfp_config.experiment_name,
enable_caching=not disable_cache,
)

kfp_run_id = run.run_id
Expand Down Expand Up @@ -931,6 +933,7 @@ def _build_config_from_spec(
"intents_s3_key": benchmark_config.get("intents_s3_key", profile.get("intents_s3_key", "")),
"intents_format": benchmark_config.get("intents_format", profile.get("intents_format", "csv")),
"sdg_flow_id": benchmark_config.get("sdg_flow_id", profile.get("sdg_flow_id", DEFAULT_SDG_FLOW_ID)),
"disable_cache": as_bool(benchmark_config.get("disable_cache", False)),
}

if art_intents:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
from ..base_eval import GarakEvalBase
from llama_stack_provider_trustyai_garak import shield_scan
from ..errors import GarakError, GarakConfigError, GarakValidationError
from ..utils import as_bool
from dotenv import load_dotenv

load_dotenv()
Expand Down Expand Up @@ -179,6 +180,7 @@ async def run_eval(self, request: RunEvalRequest) -> Job:

sanitised_config = redact_api_keys(cmd_config)

disable_cache = as_bool(provider_params.get("disable_cache", False))
run = self.kfp_client.create_run_from_pipeline_func(
garak_scan_pipeline,
arguments={
Expand All @@ -200,6 +202,7 @@ async def run_eval(self, request: RunEvalRequest) -> Job:
run_name=f"garak-{benchmark_id.split('::')[-1]}-{job_id.removeprefix(JOB_ID_PREFIX)}",
namespace=self._config.kubeflow_config.namespace,
experiment_name=experiment_name,
enable_caching=not disable_cache,
)

async with self._jobs_lock:
Expand Down
9 changes: 9 additions & 0 deletions src/llama_stack_provider_trustyai_garak/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,15 @@
from pathlib import Path
from .constants import XDG_CACHE_HOME, XDG_DATA_HOME, XDG_CONFIG_HOME

# String spellings that coerce to ``False`` (case-insensitive, whitespace-stripped).
_FALSY_STRINGS = frozenset({"false", "0", "no", "off", ""})


def as_bool(value: object) -> bool:
    """Best-effort boolean coercion.

    Non-string values go straight through :func:`bool`.  Strings are
    stripped and lower-cased first, then considered ``False`` only when
    they spell a falsy word: ``'false'``, ``'0'``, ``'no'``, ``'off'``,
    or the empty string; any other string is ``True``.
    """
    if not isinstance(value, str):
        return bool(value)
    return value.strip().lower() not in _FALSY_STRINGS


def _ensure_xdg_vars() -> None:
"""
Expand Down
Loading