adjust to new readers for csv
Signed-off-by: dafnapension <[email protected]>
dafnapension committed Jan 26, 2025
1 parent 0619111 commit 134dd22
Showing 3 changed files with 12 additions and 13 deletions.
13 changes: 4 additions & 9 deletions performance/compare_benchmark_performance_results.py
@@ -37,22 +37,17 @@
 ratio_line4 = (
     pr_perf["load_time"] / main_perf["load_time"] if main_perf["load_time"] > 0 else 1
 )
-line4 = f" Load time | {main_perf['load_time_no_initial_ms']:>11} | {pr_perf['load_time_no_initial_ms']:>11} | {ratio_line4:.2f}\n"
+line4 = f" Load time | {main_perf['load_time']:>11} | {pr_perf['load_time']:>11} | {ratio_line4:.2f}\n"
 line5 = f" DS Gen. inc. Load | {main_perf['generate_benchmark_dataset_time']:>11} | {pr_perf['generate_benchmark_dataset_time']:>11} | {pr_perf['generate_benchmark_dataset_time'] / main_perf['generate_benchmark_dataset_time']:.2f}\n"
-line6 = f" DS Gen. exc. Load | {round(main_perf['generate_benchmark_dataset_time'] - main_perf['load_time_no_initial_ms'], 3):>11} | {round(pr_perf['generate_benchmark_dataset_time'] - pr_perf['load_time_no_initial_ms'], 3):>11} | {ratio1:.2f}\n"
-line7 = f" Inference time | {main_perf['inference_time']:>11} | {pr_perf['inference_time']:>11} | {pr_perf['inference_time'] / main_perf['inference_time']:.2f}\n"
-line8 = f" Evaluate time | {main_perf['evaluation_time']:>11} | {pr_perf['evaluation_time']:>11} | {ratio2:.2f}\n"
-line9 = f" Benchmark Instant. | {main_perf['instantiate_benchmark_time']:>11} | {pr_perf['instantiate_benchmark_time']:>11} | {pr_perf['instantiate_benchmark_time'] / main_perf['instantiate_benchmark_time']:.2f}\n"
-line10 = f" Model Instantiation| {main_perf['instantiate_model_time']:>11} | {pr_perf['instantiate_model_time']:>11} | {pr_perf['instantiate_model_time'] / main_perf['instantiate_model_time']:.2f}\n"
+line6 = f" DS Gen. exc. Load | {round(main_perf['generate_benchmark_dataset_time'] - main_perf['load_time'], 3):>11} | {round(pr_perf['generate_benchmark_dataset_time'] - pr_perf['load_time'], 3):>11} | {ratio1:.2f}\n"
+line7 = f" Benchmark Instant. | {main_perf['instantiate_benchmark_time']:>11} | {pr_perf['instantiate_benchmark_time']:>11} | {pr_perf['instantiate_benchmark_time'] / main_perf['instantiate_benchmark_time']:.2f}\n"

 print("### Performance Comparison Results, time expressed in seconds:\n")
 print(line1 + line2 + line3 + line4 + line5 + line6 + line7)
 print("\n\n")
 # Performance degradation check (5% threshold)
 if ratio1 > 1.05:
-    print(
-        "\n**Warning**: Performance degradation in Dataset Generation and/or Evaluation exceeds 5%!"
-    )
+    print("\n**Warning**: Performance degradation in Dataset Generation exceeds 5%!")
     print(
         "Explore branch performance via 'python performance/bluebench_profiler.py --output_file=<path to json file>',"
         "followed by 'snakeviz <the performance.prof file specified in the output json file>'."
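For context on the script being changed: main_perf and pr_perf are the per-branch timing dictionaries the comparison reads, and ratio1 drives the 5% degradation warning. A minimal sketch of how such a comparison can be driven, assuming each benchmark run is stored as a JSON dict with the keys used above (the load_perf helper, the file names, and the exact definition of ratio1 are illustrative assumptions, not shown in this commit):

import json

def load_perf(path):
    # hypothetical helper: one JSON dict of timings per branch
    with open(path) as f:
        return json.load(f)

main_perf = load_perf("main_results.json")  # hypothetical file name
pr_perf = load_perf("pr_results.json")      # hypothetical file name

# assumed definition: PR/main ratio of dataset generation time excluding load
ratio1 = (pr_perf["generate_benchmark_dataset_time"] - pr_perf["load_time"]) / (
    main_perf["generate_benchmark_dataset_time"] - main_perf["load_time"]
)

if ratio1 > 1.05:  # same 5% threshold as the script above
    print("Warning: dataset generation (excluding load) is more than 5% slower")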
10 changes: 7 additions & 3 deletions src/unitxt/loaders.py
@@ -447,9 +447,13 @@ def load_iterables(self):
         return iterables

     def split_generator(self, split: str) -> Generator:
-        dataset = pd.read_csv(
-            self.files[split], nrows=self.get_limit(), sep=self.sep
-        ).to_dict("records")
+        dataset = self.__class__._loader_cache.get(str(self) + "_" + split, None)
+        if dataset is None:
+            reader = self.get_reader()
+            dataset = reader(self.files[split], **self.get_args()).to_dict("records")
+            self.__class__._loader_cache.max_size = settings.loader_cache_size
+            self.__class__._loader_cache[str(self) + "_" + split] = dataset

         yield from dataset


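The refactored split_generator no longer hard-codes pd.read_csv: it asks the loader for a reader callable via get_reader() and for its keyword arguments via get_args(), and it memoizes the parsed records in the class-level _loader_cache (keyed by str(self) + "_" + split and bounded by settings.loader_cache_size), so a repeated request for the same split does not re-read the file. A standalone sketch of that reader/args pattern, with illustrative class and attribute names (the commit does not show get_reader or get_args, so their bodies below are assumptions, not the actual unitxt implementation):

import pandas as pd

class CsvLoaderSketch:
    def __init__(self, files, sep=",", limit=None):
        self.files = files  # e.g. {"train": "train.csv", "test": "test.csv"}
        self.sep = sep
        self.limit = limit

    def get_limit(self):
        return self.limit

    def get_reader(self):
        # the callable used to read one file; for CSV this would be pandas' reader
        return pd.read_csv

    def get_args(self):
        # keyword arguments forwarded to the reader
        args = {"sep": self.sep}
        if self.get_limit() is not None:
            args["nrows"] = self.get_limit()
        return args

    def split_generator(self, split):
        # mirrors the refactored method above, minus the class-level cache
        reader = self.get_reader()
        dataset = reader(self.files[split], **self.get_args()).to_dict("records")
        yield from dataset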
2 changes: 1 addition & 1 deletion utils/.secrets.baseline
@@ -151,7 +151,7 @@
       "filename": "src/unitxt/loaders.py",
       "hashed_secret": "840268f77a57d5553add023cfa8a4d1535f49742",
       "is_verified": false,
-      "line_number": 595,
+      "line_number": 599,
       "is_secret": false
     }
   ],