Skip to content

Commit 3c9939c

Browse files
Added more detailed logging.
1 parent cb70e5f commit 3c9939c

File tree

6 files changed

+18
-6
lines changed

6 files changed

+18
-6
lines changed

src/fmcore/algorithm/vllm.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -46,12 +46,14 @@ def set_params(cls, params: Dict) -> Dict:
4646
params,
4747
param="max_model_len",
4848
alias=[
49+
"max_length",
4950
"max_len",
50-
"max_model_len",
5151
"max_sequence_length",
5252
"max_sequence_len",
5353
"max_input_length",
5454
"max_input_len",
55+
"max_model_length",
56+
"max_model_len",
5557
],
5658
)
5759
params["generation_params"] = TextGenerationParamsMapper.of(

src/fmcore/framework/_algorithm.py

+1
Original file line numberDiff line numberDiff line change
@@ -230,6 +230,7 @@ def create_hyperparams(cls, hyperparams: Optional[Dict] = None) -> Hyperparamete
230230
@classmethod
231231
def convert_params(cls, params: Dict) -> Dict:
232232
## Convert and validate parameters for the algorithm
233+
# print(f'params for {cls.class_name}=\n{params}')
233234
cls.set_default_param_values(params)
234235
## This allows us to create a new Algorithm instance without specifying `hyperparams`.
235236
## If it is specified, we will pick cls.Hyperparameters, which can be overridden by the subclass.

src/fmcore/framework/_chain/Chain.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -291,8 +291,8 @@ def run(
291291
background: bool = False,
292292
tracker: Optional[Union[Tracker, Dict, str]] = None,
293293
notifier: Optional[Union[Notifier, Dict, str]] = None,
294-
store_step_inputs: bool = False,
295-
store_step_outputs: bool = False,
294+
store_step_inputs: bool = True,
295+
store_step_outputs: bool = True,
296296
after: Optional[ChainExecution] = None,
297297
after_wait: conint(ge=0) = 15,
298298
step_wait: confloat(ge=0.0) = 0.0,

src/fmcore/framework/_dataset.py

+4
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,10 @@ def concat(
8585
@model_validator(mode="before")
8686
@classmethod
8787
def _set_dataset_params(cls, params: Dict) -> Dict:
88+
if "data_schema" not in params:
89+
raise ValueError(
90+
f"Cannot create instance of class '{cls.class_name}' without passing `data_schema` parameter."
91+
)
8892
data_schema: Union[Schema, MLTypeSchema] = params["data_schema"]
8993
if isinstance(data_schema, dict):
9094
## We need to infer the schema:

src/fmcore/framework/_predictions.py

+6-1
Original file line numberDiff line numberDiff line change
@@ -36,8 +36,8 @@
3636
MLTypeSchema,
3737
TaskOrStr,
3838
)
39-
from fmcore.framework._task_mixins import InputOutputDataMixin, SchemaValidationError
4039
from fmcore.framework._dataset import Dataset
40+
from fmcore.framework._task_mixins import InputOutputDataMixin, SchemaValidationError
4141

4242
Predictions = "Predictions"
4343
Visualization = "Visualization"
@@ -69,6 +69,11 @@ def _pre_registration_hook(cls):
6969
@model_validator(mode="before")
7070
@classmethod
7171
def _set_predictions_params(cls, params: Dict) -> Dict:
72+
if "data_schema" not in params:
73+
raise ValueError(
74+
f"Cannot create instance of class '{cls.class_name}' without passing `data_schema` parameter."
75+
)
76+
7277
params["data_schema"]: Schema = Schema.of(params["data_schema"], schema_template=cls.schema_template)
7378
# data_schema: Union[Schema, MLTypeSchema] = params['data_schema']
7479
# if isinstance(data_schema, dict):

src/fmcore/framework/_task/text_generation.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -752,12 +752,12 @@ def _create_predictions(self, batch: Prompts, predictions: Dict, **kwargs) -> Ne
752752

753753

754754
class LanguageModelTaskMixin(Algorithm, ABC):
755-
lm: Optional[Union[GenerativeLM, Any]] = None
755+
lm: Optional[Any] = None
756756
icl_dataset: Optional[Dataset] = None
757757
icl_sampler: Optional[ICLSampler] = None ## Will be not-None when icl_dataset is not-None.
758758

759759
class Hyperparameters(Algorithm.Hyperparameters):
760-
lm: Optional[Dict] ## Params for llm
760+
lm: Optional[Dict] = None ## Params for llm
761761
batch_size: Optional[conint(ge=1)] = 1 ## By default, predict 1 row at a time.
762762
prompt_template: constr(min_length=1)
763763
icl_template: Optional[constr(min_length=1)] = None

0 commit comments

Comments (0)