Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[*.py] Rename "Arguments:" to "Args:" #456

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions kerastuner/applications/augment.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ class HyperImageAugment(hypermodel.HyperModel):
""" Builds HyperModel for image augmentation.
Only supporting augmentations available in Keras preprocessing layers currently.

# Arguments:
# Args:
input_shape: Optional shape tuple, e.g. `(256, 256, 3)`.
input_tensor: Optional Keras tensor (i.e. output of
`layers.Input()`) to use as image input for the model.
Expand Down Expand Up @@ -225,7 +225,7 @@ def _build_fixedaug_layers(self, inputs, hp):

def _register_transform(self, transform_name, transform_params):
"""Register a transform and format parameters for tuning the transform.
# Arguments:
# Args:
transform_name: str, the name of the transform.
transform_params: A number between [0, 1], a list of two numbers
between [0, 1] or None. If set to a single number x, the
Expand Down
2 changes: 1 addition & 1 deletion kerastuner/applications/efficientnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ class HyperEfficientNet(hypermodel.HyperModel):
ints [0, 255]. The output data should be one-hot encoded
with number of classes matching `classes`.

# Arguments:
# Args:

input_shape: shape tuple, e.g. `(256, 256, 3)`.
Input images will be resized if different from
Expand Down
2 changes: 1 addition & 1 deletion kerastuner/applications/resnet.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
class HyperResNet(hypermodel.HyperModel):
"""A ResNet HyperModel.

# Arguments:
# Args:

include_top: whether to include the fully-connected
layer at the top of the network.
Expand Down
2 changes: 1 addition & 1 deletion kerastuner/applications/xception.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
class HyperXception(hypermodel.HyperModel):
"""An Xception HyperModel.

# Arguments:
# Args:

include_top: whether to include the fully-connected
layer at the top of the network.
Expand Down
18 changes: 9 additions & 9 deletions kerastuner/engine/base_tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ class BaseTuner(stateful.Stateful):

May be subclassed to create new tuners, including for non-Keras models.

# Arguments:
# Args:
oracle: Instance of Oracle class.
hypermodel: Instance of HyperModel class
(or callable that takes hyperparameters
Expand Down Expand Up @@ -108,7 +108,7 @@ def _populate_initial_space(self):
def search(self, *fit_args, **fit_kwargs):
"""Performs a search for best hyperparameter configurations.

# Arguments:
# Args:
*fit_args: Positional arguments that should be passed to
`run_trial`, for example the training and validation data.
**fit_kwargs: Keyword arguments that should be passed to
Expand Down Expand Up @@ -154,7 +154,7 @@ def run_trial(self, trial, x, y, val_x, val_y):
self.save_model(trial.trial_id, model)
```

# Arguments:
# Args:
trial: A `Trial` instance that contains the information
needed to run this trial. Hyperparameters can be accessed
via `trial.hyperparameters`.
Expand All @@ -166,7 +166,7 @@ def run_trial(self, trial, x, y, val_x, val_y):
def save_model(self, trial_id, model, step=0):
"""Saves a Model for a given trial.

# Arguments:
# Args:
trial_id: The ID of the `Trial` that corresponds to this Model.
model: The trained model.
step: For models that report intermediate results to the `Oracle`,
Expand All @@ -178,7 +178,7 @@ def save_model(self, trial_id, model, step=0):
def load_model(self, trial):
"""Loads a Model from a given trial.

# Arguments:
# Args:
trial: A `Trial` instance. For models that report intermediate
results to the `Oracle`, generally `load_model` should load the
best reported `step` by relying on `trial.best_step`
Expand All @@ -193,7 +193,7 @@ def on_search_begin(self):
def on_trial_begin(self, trial):
"""A hook called before starting each trial.

# Arguments:
# Args:
trial: A `Trial` instance.
"""
if self.logger:
Expand All @@ -203,7 +203,7 @@ def on_trial_begin(self, trial):
def on_trial_end(self, trial):
"""A hook called after each trial is run.

# Arguments:
# Args:
trial: A `Trial` instance.
"""
# Send status to Logger
Expand All @@ -229,7 +229,7 @@ def get_best_models(self, num_models=1):
recommended to retrain your Model on the full dataset using the best
hyperparameters found during `search`.

# Arguments:
# Args:
num_models (int, optional). Number of best models to return.
Models will be returned in sorted order. Defaults to 1.

Expand All @@ -253,7 +253,7 @@ def get_best_hyperparameters(self, num_trials=1):
model = tuner.hypermodel.build(best_hp)
```

# Arguments:
# Args:
num_trials: (int, optional). Number of `HyperParameters` objects to
return. `HyperParameters` will be returned in sorted order based on
trial performance.
Expand Down
4 changes: 2 additions & 2 deletions kerastuner/engine/conditions.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,7 @@ def is_active(self, values):

Determines whether this condition is true for the current `Trial`.

# Arguments:
# Args:
values: Dict. The active values for this `Trial`. Keys are the
names of the hyperparameters.

Expand Down Expand Up @@ -94,7 +94,7 @@ class Parent(Condition):
b = Int('num_layers', 5, 10, conditions=[kt.conditions.Parent('a', ['dnn'])])
```

# Arguments:
# Args:
name: The name of a `HyperParameter`.
values: Values for which the `HyperParameter` this object is
passed to should be considered active.
Expand Down
2 changes: 1 addition & 1 deletion kerastuner/engine/hypermodel.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def __init__(self, name=None, tunable=True):
def build(self, hp):
"""Builds a model.

# Arguments:
# Args:
hp: A `HyperParameters` instance.

# Returns:
Expand Down
20 changes: 10 additions & 10 deletions kerastuner/engine/hyperparameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def _check_int(val, arg):
class HyperParameter(object):
"""HyperParameter base class.

# Arguments:
# Args:
name: Str. Name of parameter. Must be unique.
default: Default value to return for the
parameter.
Expand Down Expand Up @@ -104,7 +104,7 @@ def from_config(cls, config):
class Choice(HyperParameter):
"""Choice of one value among a predefined set of possible values.

# Arguments:
# Args:
name: Str. Name of parameter. Must be unique.
values: List of possible values. Values must be int, float,
str, or bool. All values must be of the same type.
Expand Down Expand Up @@ -226,7 +226,7 @@ class Int(HyperParameter):
Note that unlike Python's `range` function, `max_value` is *included* in
the possible values this parameter can take on.

# Arguments:
# Args:
name: Str. Name of parameter. Must be unique.
min_value: Int. Lower limit of range (included).
max_value: Int. Upper limit of range (included).
Expand Down Expand Up @@ -314,7 +314,7 @@ def to_proto(self):
class Float(HyperParameter):
"""Floating point range, can be evenly divided.

# Arguments:
# Args:
name: Str. Name of parameter. Must be unique.
min_value: Float. Lower bound of the range.
max_value: Float. Upper bound of the range.
Expand Down Expand Up @@ -566,7 +566,7 @@ def conditional_scope(self, parent_name, parent_values):
Note that any Python code under this scope will execute
regardless of whether the condition is met.

# Arguments:
# Args:
parent_name: The name of the HyperParameter to condition on.
parent_values: Values of the parent HyperParameter for which
HyperParameters under this scope should be considered active.
Expand All @@ -586,7 +586,7 @@ def conditional_scope(self, parent_name, parent_values):
def is_active(self, hyperparameter):
"""Checks if a hyperparameter is currently active for a `Trial`.

# Arguments:
# Args:
hyperparameter: Str or `HyperParameter`. If str, checks if any
`HyperParameter` with that name is active. If `HyperParameter`,
checks that this object is active.
Expand Down Expand Up @@ -671,7 +671,7 @@ def Choice(self,
parent_values=None):
"""Choice of one value among a predefined set of possible values.

# Arguments:
# Args:
name: Str. Name of parameter. Must be unique.
values: List of possible values. Values must be int, float,
str, or bool. All values must be of the same type.
Expand Down Expand Up @@ -712,7 +712,7 @@ def Int(self,
Note that unlike Python's `range` function, `max_value` is *included* in
the possible values this parameter can take on.

# Arguments:
# Args:
name: Str. Name of parameter. Must be unique.
min_value: Int. Lower limit of range (included).
max_value: Int. Upper limit of range (included).
Expand Down Expand Up @@ -754,7 +754,7 @@ def Float(self,
parent_values=None):
"""Floating point range, can be evenly divided.

# Arguments:
# Args:
name: Str. Name of parameter. Must be unique.
min_value: Float. Lower bound of the range.
max_value: Float. Upper bound of the range.
Expand Down Expand Up @@ -866,7 +866,7 @@ def copy(self):
def merge(self, hps, overwrite=True):
"""Merges hyperparameters into this object.

Arguments:
Args:
hps: A `HyperParameters` object or list of `HyperParameter`
objects.
overwrite: bool. Whether existing `HyperParameter`s should
Expand Down
6 changes: 3 additions & 3 deletions kerastuner/engine/stateful.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,15 +35,15 @@ def set_state(self, state):

This method is called during `reload`.

# Arguments:
# Args:
state: Dict. The state to restore for this object.
"""
raise NotImplementedError

def save(self, fname):
"""Saves this object using `get_state`.

# Arguments:
# Args:
fname: The file name to save to.
"""
state = self.get_state()
Expand All @@ -55,7 +55,7 @@ def save(self, fname):
def reload(self, fname):
"""Reloads this object using `set_state`.

# Arguments:
# Args:
fname: The file name to restore from.
"""
with tf.io.gfile.GFile(fname, 'r') as f:
Expand Down
14 changes: 7 additions & 7 deletions kerastuner/engine/tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ class Tuner(base_tuner.BaseTuner):

May be subclassed to create new tuners.

# Arguments:
# Args:
oracle: Instance of Oracle class.
hypermodel: Instance of HyperModel class
(or callable that takes hyperparameters
Expand Down Expand Up @@ -127,7 +127,7 @@ def _build_and_fit_model(self, trial, fit_args, fit_kwargs):
the input shape before building the model, adapt preprocessing layers,
and tune other fit_args and fit_kwargs.

# Arguments:
# Args:
trial: A `Trial` instance that contains the information
needed to run this trial. `Hyperparameters` can be accessed
via `trial.hyperparameters`.
Expand All @@ -146,7 +146,7 @@ def run_trial(self, trial, *fit_args, **fit_kwargs):
This method is called during `search` to evaluate a set of
hyperparameters.

# Arguments:
# Args:
trial: A `Trial` instance that contains the information
needed to run this trial. `Hyperparameters` can be accessed
via `trial.hyperparameters`.
Expand Down Expand Up @@ -202,7 +202,7 @@ def load_model(self, trial):
def on_epoch_begin(self, trial, model, epoch, logs=None):
"""A hook called at the start of every epoch.

# Arguments:
# Args:
trial: A `Trial` instance.
model: A Keras `Model`.
epoch: The current epoch number.
Expand All @@ -213,7 +213,7 @@ def on_epoch_begin(self, trial, model, epoch, logs=None):
def on_batch_begin(self, trial, model, batch, logs):
"""A hook called at the start of every batch.

# Arguments:
# Args:
trial: A `Trial` instance.
model: A Keras `Model`.
batch: The current batch number within the
Expand All @@ -225,7 +225,7 @@ def on_batch_begin(self, trial, model, batch, logs):
def on_batch_end(self, trial, model, batch, logs=None):
"""A hook called at the end of every batch.

# Arguments:
# Args:
trial: A `Trial` instance.
model: A Keras `Model`.
batch: The current batch number within the
Expand All @@ -237,7 +237,7 @@ def on_batch_end(self, trial, model, batch, logs=None):
def on_epoch_end(self, trial, model, epoch, logs=None):
"""A hook called at the end of every epoch.

# Arguments:
# Args:
trial: A `Trial` instance.
model: A Keras `Model`.
epoch: The current epoch number.
Expand Down
2 changes: 1 addition & 1 deletion kerastuner/tuners/bayesian.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ def _get_hp_bounds(self):
class BayesianOptimization(multi_execution_tuner.MultiExecutionTuner):
"""BayesianOptimization tuning with Gaussian process.

# Arguments:
# Args:
hypermodel: Instance of HyperModel class
(or callable that takes hyperparameters
and returns a Model instance).
Expand Down
2 changes: 1 addition & 1 deletion kerastuner/tuners/hyperband.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def run_trial(self, trial, *args, **kwargs):
self.on_epoch_end(...)
```

# Arguments:
# Args:
objective: String or `kerastuner.Objective`. If a string,
the direction of the optimization (min or max) will be
inferred.
Expand Down
4 changes: 2 additions & 2 deletions kerastuner/tuners/randomsearch.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
class RandomSearchOracle(oracle_module.Oracle):
"""Random search oracle.

# Arguments:
# Args:
objective: String or `kerastuner.Objective`. If a string,
the direction of the optimization (min or max) will be
inferred.
Expand Down Expand Up @@ -87,7 +87,7 @@ def _populate_space(self, _):
class RandomSearch(multi_execution_tuner.MultiExecutionTuner):
"""Random search tuner.

# Arguments:
# Args:
hypermodel: Instance of HyperModel class
(or callable that takes hyperparameters
and returns a Model instance).
Expand Down
4 changes: 2 additions & 2 deletions kerastuner/tuners/sklearn_tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ class Sklearn(base_tuner.BaseTuner):
Performs cross-validated hyperparameter search for Scikit-learn
models.

# Arguments:
# Args:
oracle: An instance of the `kerastuner.Oracle` class. Note that for
this `Tuner`, the `objective` for the `Oracle` should always be set
to `Objective('score', direction='max')`. Also, `Oracle`s that exploit
Expand Down Expand Up @@ -114,7 +114,7 @@ def __init__(self,
def search(self, X, y, sample_weight=None, groups=None):
"""Performs hyperparameter search.

Arguments:
Args:
X: See docstring for `model.fit` for the `sklearn` Models being tuned.
y: See docstring for `model.fit` for the `sklearn` Models being tuned.
sample_weight: (Optional). See docstring for `model.fit` for the
Expand Down