Skip to content

Commit 87a3f09

Browse files
Merge pull request #24 from aws/flex-npo
Feat: Make NovaPromptOptimizer more customizable by changing Prompting Models
2 parents 9830501 + e76e600 commit 87a3f09

File tree

4 files changed

+304
-40
lines changed

4 files changed

+304
-40
lines changed

src/amzn_nova_prompt_optimizer/__version__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,4 +11,4 @@
1111
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
14-
VERSION = "1.0.48" # pragma: no cover
14+
VERSION = "1.0.52" # pragma: no cover

src/amzn_nova_prompt_optimizer/core/optimizers/nova_meta_prompter/nova_mp_optimizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ def __init__(self, prompt_adapter: PromptAdapter,
4242

4343

4444
def optimize(self, prompter_model_id: str = DEFAULT_PROMPTER_MODEL_ID, max_retries: int = 5):
45-
logger.info("Optimizing prompt using Nova Meta Prompter")
45+
logger.info(f"Optimizing prompt using Nova Meta Prompter with Model: {prompter_model_id}")
4646
if not self.inference_adapter:
4747
raise ValueError("Inference Adapter not passed. "
4848
"Initialize and Pass Inference Adapter to use this Optimizer")

src/amzn_nova_prompt_optimizer/core/optimizers/nova_prompt_optimizer/nova_prompt_optimizer.py

Lines changed: 37 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -24,29 +24,37 @@
2424

2525
logger = logging.getLogger(__name__)
2626

27-
NOVA_OPTIMA_MODE: Dict[str, Dict[str, Any]] = {
27+
NOVA_PROMPT_OPTIMIZER_MODE: Dict[str, Dict[str, Any]] = {
2828
"micro": {
29+
"meta_prompt_model_id": "us.amazon.nova-premier-v1:0",
30+
"prompter_model_id": "us.amazon.nova-premier-v1:0",
2931
"task_model_id": "us.amazon.nova-micro-v1:0",
3032
"num_candidates": 20,
3133
"num_trials": 30,
3234
"max_bootstrapped_demos": 4,
3335
"max_labeled_demos": 4
3436
},
3537
"lite": {
38+
"meta_prompt_model_id": "us.amazon.nova-premier-v1:0",
39+
"prompter_model_id": "us.amazon.nova-premier-v1:0",
3640
"task_model_id": "us.amazon.nova-lite-v1:0",
3741
"num_candidates": 20,
3842
"num_trials": 30,
3943
"max_bootstrapped_demos": 4,
4044
"max_labeled_demos": 4
4145
},
4246
"pro": {
47+
"meta_prompt_model_id": "us.amazon.nova-premier-v1:0",
48+
"prompter_model_id": "us.amazon.nova-premier-v1:0",
4349
"task_model_id": "us.amazon.nova-pro-v1:0",
4450
"num_candidates": 20,
4551
"num_trials": 30,
4652
"max_bootstrapped_demos": 4,
4753
"max_labeled_demos": 4
4854
},
4955
"premier": {
56+
"meta_prompt_model_id": "us.amazon.nova-premier-v1:0",
57+
"prompter_model_id": "us.amazon.nova-premier-v1:0",
5058
"task_model_id": "us.amazon.nova-premier-v1:0",
5159
"num_candidates": 20,
5260
"num_trials": 30,
@@ -58,10 +66,9 @@
5866

5967
class NovaPromptOptimizer(OptimizationAdapter):
6068
"""
61-
NovaOptima is a combination of Meta Prompting and MIPROv2 for Nova Models that yields a stable
69+
NovaPromptOptimizer is a combination of Meta Prompting and MIPROv2 for Nova Models that yields a stable
6270
prompt optimization result.
6371
"""
64-
6572
def __init__(self, prompt_adapter: PromptAdapter,
6673
inference_adapter: InferenceAdapter,
6774
dataset_adapter: DatasetAdapter,
@@ -75,34 +82,42 @@ def __init__(self, prompt_adapter: PromptAdapter,
7582
self.meta_prompt_optimization_adapter = NovaMPOptimizationAdapter(prompt_adapter, inference_adapter)
7683

7784
def optimize(self, mode: str = "pro", custom_params = None) -> PromptAdapter:
85+
if mode == "custom":
86+
if not custom_params:
87+
raise ValueError("Custom mode requires custom_params dictionary")
88+
required_keys = {"task_model_id", "num_candidates", "num_trials",
89+
"max_bootstrapped_demos", "max_labeled_demos"}
90+
if not all(key in custom_params for key in required_keys):
91+
raise ValueError(f"custom_params must contain all required keys: {required_keys}")
92+
meta_prompt_model_id = custom_params.pop("meta_prompt_model_id", None)
93+
optimization_params = custom_params
94+
else:
95+
if mode not in NOVA_PROMPT_OPTIMIZER_MODE:
96+
logger.warning(f"Mode '{mode}' not detected, defaulting to 'pro' mode")
97+
mode = "pro"
98+
config = NOVA_PROMPT_OPTIMIZER_MODE[mode].copy() # Create a copy to avoid modifying the original
99+
meta_prompt_model_id = config.pop("meta_prompt_model_id")
100+
optimization_params = config
101+
102+
78103
if not self.inference_adapter:
79104
raise ValueError("Inference Adapter not passed. "
80105
"Initialize and Pass Inference Adapter to use this Optimizer")
81-
intermediate_prompt_adapter = self.meta_prompt_optimization_adapter.optimize()
106+
if meta_prompt_model_id:
107+
intermediate_prompt_adapter = (
108+
self.meta_prompt_optimization_adapter.optimize(prompter_model_id=meta_prompt_model_id))
109+
else:
110+
intermediate_prompt_adapter = self.meta_prompt_optimization_adapter.optimize()
111+
82112
if not self.dataset_adapter or not self.metric_adapter:
83-
logger.info("[Nova Optima] No Dataset or No metric provided, running only Nova Meta Prompter")
113+
logger.info("[Nova Prompt Optimizer] No Dataset or No metric provided, running only Nova Meta Prompter")
84114
return intermediate_prompt_adapter
85115

86-
nova_optima_optimization_adapter = NovaMIPROv2OptimizationAdapter(
116+
nova_prompt_optimizer = NovaMIPROv2OptimizationAdapter(
87117
prompt_adapter=intermediate_prompt_adapter,
88118
dataset_adapter=self.dataset_adapter,
89119
metric_adapter=self.metric_adapter,
90120
inference_adapter=self.inference_adapter)
91121

92-
if mode == "custom":
93-
if not custom_params:
94-
raise ValueError("Custom mode requires custom_params dictionary")
95-
required_keys = {"task_model_id", "num_candidates", "num_trials",
96-
"max_bootstrapped_demos", "max_labeled_demos"}
97-
if not all(key in custom_params for key in required_keys):
98-
raise ValueError(f"custom_params must contain all required keys: {required_keys}")
99-
optimization_params = custom_params
100-
else:
101-
if mode not in NOVA_OPTIMA_MODE:
102-
logger.warning(f"Mode '{mode}' not detected, defaulting to 'pro' mode")
103-
optimization_params = NOVA_OPTIMA_MODE["pro"]
104-
else:
105-
optimization_params = NOVA_OPTIMA_MODE[mode]
106-
optimized_prompt_adapter = nova_optima_optimization_adapter.optimize(**optimization_params,
107-
enable_json_fallback=False)
122+
optimized_prompt_adapter = nova_prompt_optimizer.optimize(**optimization_params, enable_json_fallback=False)
108123
return optimized_prompt_adapter

0 commit comments

Comments (0)