diff --git a/README.md b/README.md
index 3b6270c..8814c30 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@ tasks.
 
 - do not try to invent new set of high level programming abstractions
   (yet): use a low level programming model (IPython.parallel) to finely
-  control the cluster elements and messages transfered and help identify
+  control the cluster elements and messages transferred and help identify
   what are the practical underlying constraints in distributed machine
   learning setting.
diff --git a/pyrallel/model_selection.py b/pyrallel/model_selection.py
index 241ce81..0429d2d 100644
--- a/pyrallel/model_selection.py
+++ b/pyrallel/model_selection.py
@@ -90,7 +90,7 @@ def reset(self):
         # Abort any other previously scheduled tasks
         self.abort()
 
-        # Schedule a new batch of evalutation tasks
+        # Schedule a new batch of evaluation tasks
         self.task_groups, self.all_parameters = [], []
 
         # Collect temporary files: