@@ -189,11 +189,8 @@ def __enter__(self):
189189 if not self ._reset and self ._attempts == 0 :
190190 raise Exception ("Must reset before entering the Store context." )
191191
192- if 0 < self ._attempts :
193- assert self ._session is None
194- assert self ._conn is None
195- assert self ._engine is None
196-
192+ # on first re-attempt, do not sleep
193+ if 2 <= self ._attempts :
197194 sleep_time = (
198195 self ._wait_secs
199196 * (self ._wait_lengthing ** self ._attempts )
@@ -729,27 +726,41 @@ def iter_experiment_trials(self, experiment_id: int):
729726 trial = self .from_db_trial (trial_orm )
730727 yield trial
731728
def get_status(self, experiment_name: str):
    """Return a pandas DataFrame with one row per trial of the named experiment.

    Columns: trial_id, task, method, meta, replicate_num, status, errmsg,
    create_time, start_time, end_time, runner_id.

    Args:
        experiment_name: The ``experiment.name`` value to look up.

    Returns:
        pandas.DataFrame of the joined trial/task/method status rows
        (empty if the experiment has no trials or does not exist).
    """
    # NOTE(review): experiment_name is passed as a bound parameter
    # (:experiment_name) rather than f-string interpolation — the previous
    # form was vulnerable to SQL injection and broke on names containing
    # a single quote.
    sql = text(
        """
        SELECT
            t.id AS trial_id,
            ta.name AS task,
            m.name AS method,
            t.meta AS meta,
            t.replicate_num AS replicate_num,
            t.status AS status,
            t.errmsg AS errmsg,
            t.create_time AS create_time,
            t.start_time AS start_time,
            t.end_time AS end_time,
            t.runner_id AS runner_id
        FROM
            experiment e
        JOIN
            trial t on e.id = t.experiment_id
        JOIN
            task ta ON t.task_id = ta.id
        JOIN
            method m ON t.method_id = m.id
        WHERE
            e.name = :experiment_name
        """
    )
    # Retry loop: reset the connection state, then re-enter the Store
    # context until the query succeeds (same pattern as get_results).
    self.reset()
    while self.do:
        with self:
            result = self._session.execute(
                sql, {"experiment_name": experiment_name}
            )
            records = result.all()
            columns = result.keys()
            df = pd.DataFrame.from_records(records, columns=columns)
            return df
753764
754765 def get_results (self , experiment_name : str ):
755766 sql = text (
@@ -784,7 +795,6 @@ def get_results(self, experiment_name: str):
784795 e.name = '{ experiment_name } '
785796 """
786797 )
787-
788798 self .reset ()
789799 while self .do :
790800 with self :
@@ -945,7 +955,6 @@ def _create_task_with_supervised(self, supervised, version):
945955 mimetype = y_mimetype ,
946956 embedded = y_bstream .getvalue (),
947957 )
948-
949958 meta_orm = db .Asset (
950959 name = meta_name ,
951960 description = f"Metadata for { supervised .name ()} " ,
@@ -977,6 +986,7 @@ def _create_task_with_supervised(self, supervised, version):
977986
978987 self ._session .add (X_orm )
979988 self ._session .add (y_orm )
989+ self ._session .add (meta_orm )
980990 self ._session .add (task_orm )
981991 self ._session .flush ()
982992
@@ -1004,7 +1014,6 @@ def _create_task_with_dataframe(self, data, version):
10041014 mimetype = outputs_mimetype ,
10051015 embedded = outputs_bstream .getvalue (),
10061016 )
1007-
10081017 meta_orm = db .Asset (
10091018 name = meta_name ,
10101019 description = f"Metadata for { data .name ()} " ,
@@ -1036,6 +1045,7 @@ def _create_task_with_dataframe(self, data, version):
10361045
10371046 self ._session .add (inputs_orm )
10381047 self ._session .add (outputs_orm )
1048+ self ._session .add (meta_orm )
10391049 self ._session .add (task_orm )
10401050 self ._session .flush ()
10411051
@@ -1231,8 +1241,8 @@ def populate_with_datasets(
12311241
12321242 if dataset_iter is None :
12331243 dataset_iter = chain (
1244+ retrieve_openml_cc18 (cache_dir = cache_dir ),
12341245 retrieve_openml_automl_regression (cache_dir = cache_dir ),
1235- retrieve_openml_automl_classification (cache_dir = cache_dir ),
12361246 )
12371247
12381248 for dataset in dataset_iter :
0 commit comments