Skip to content

Commit

Permalink
add @returns to optimizers
Browse files Browse the repository at this point in the history
  • Loading branch information
t-kalinowski committed Feb 4, 2024
1 parent 8f45b8c commit deb599a
Show file tree
Hide file tree
Showing 2 changed files with 14 additions and 0 deletions.
12 changes: 12 additions & 0 deletions R/optimizers.R
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,7 @@
#' For forward/backward compatibility.
#'
#' @export
#' @returns an `Optimizer` instance
#' @family optimizers
#' @seealso
#' + <https://keras.io/api/optimizers/adadelta#adadelta-class>
Expand Down Expand Up @@ -228,6 +229,7 @@ function (learning_rate = 0.001, rho = 0.95, epsilon = 1e-07,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/adafactor#adafactor-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/Adafactor>
Expand Down Expand Up @@ -336,6 +338,7 @@ function (learning_rate = 0.001, beta_2_decay = -0.8, epsilon_1 = 1e-30,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/adagrad#adagrad-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/Adagrad>
Expand Down Expand Up @@ -457,6 +460,7 @@ function (learning_rate = 0.001, initial_accumulator_value = 0.1,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/adam#adam-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/Adam>
Expand Down Expand Up @@ -587,6 +591,7 @@ function (learning_rate = 0.001, beta_1 = 0.9, beta_2 = 0.999,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/adamax#adamax-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/Adamax>
Expand Down Expand Up @@ -717,6 +722,7 @@ function (learning_rate = 0.001, beta_1 = 0.9, beta_2 = 0.999,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/adamw#adamw-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/AdamW>
Expand Down Expand Up @@ -879,6 +885,7 @@ function (learning_rate = 0.001, weight_decay = 0.004, beta_1 = 0.9,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/ftrl#ftrl-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/Ftrl>
Expand Down Expand Up @@ -999,6 +1006,7 @@ function (learning_rate = 0.001, learning_rate_power = -0.5,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
# @seealso
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/Lion>
#' @tether keras.optimizers.Lion
Expand Down Expand Up @@ -1110,6 +1118,7 @@ function (learning_rate = 0.001, beta_1 = 0.9, beta_2 = 0.99,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
# @seealso
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/LossScaleOptimizer>
#' @tether keras.optimizers.LossScaleOptimizer
Expand Down Expand Up @@ -1223,6 +1232,7 @@ function (inner_optimizer, initial_scale = 32768, dynamic_growth_steps = 2000L,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/Nadam#nadam-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/Nadam>
Expand Down Expand Up @@ -1350,6 +1360,7 @@ function (learning_rate = 0.001, beta_1 = 0.9, beta_2 = 0.999,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/rmsprop#rmsprop-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/RMSprop>
Expand Down Expand Up @@ -1469,6 +1480,7 @@ function (learning_rate = 0.001, rho = 0.9, momentum = 0, epsilon = 1e-07,
#'
#' @export
#' @family optimizers
#' @returns an `Optimizer` instance
#' @seealso
#' + <https://keras.io/api/optimizers/sgd#sgd-class>
# + <https://www.tensorflow.org/api_docs/python/tf/keras/optimizers/SGD>
Expand Down
2 changes: 2 additions & 0 deletions tools/fix-roxy-blocks.R
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,8 @@ missing_returns_section <- function(block) {

# Missing @returns
walk_roxy_blocks(function(block) {
if(roxygen2::block_has_tags(block, "noRd"))
return()
if(missing_returns_section(block)) {
file <- block$file
line <- block$line
Expand Down

0 comments on commit deb599a

Please sign in to comment.