Fit the model according to the given data_set.

fit(
  model,
  data_set,
  num_epoch = 10L,
  lr = 0.01,
  batch_size = 128L,
  optimizer = "sgd",
  lr_scheduler = "constant",
  nesterov = TRUE,
  activation = "auto",
  regularizer = "none",
  regularize_coeff = 1,
  ...
)

Arguments

data_set:

(DataSet) - DataSet from encode_train_data.

num_epoch:

(Optional: integer, default 10) - Number of epochs.

lr:

(Optional: double, default 0.01) - Learning rate.

batch_size:

(Optional: integer, default 128) - Size of minibatch.

optimizer:

(Optional: string "sgd", "adam", "adagrad", ..., default "sgd") - Optimizer of the model.

lr_scheduler:

(Optional: string "lambda", "constant", "multiplicative", "exponential", default "constant") - Learning rate scheduler.

nesterov:

(Optional: logical, default TRUE) - Whether to apply Nesterov acceleration.

activation:

(Optional: string "auto", "sigmoid", "sigmoid_wide", "softmax", "softmax_wide", default "auto") - Activation function of the model. If the option is "auto", then the activation function would be selected according to the number of classes - "sigmoid" for binary classification, and "softmax" for multi-class classification. It is recommended to use "sigmoid_wide" or "softmax_wide" instead of "sigmoid" and "softmax", if the input features are not properly scaled.

regularizer:

(Optional: string, default "none") - Regularization method applied during training.

regularize_coeff:

(Optional: double, default 1.0) - Coefficient of the regularization term. Only used when regularizer is not "none".

Value

invisible(logit_model)

Examples

if (FALSE) {
params <- heaan_sdk.HEParameter("FGb")
context <- heaan_sdk.Context(
     params,
     key_dir_path = key_dir_path,
     load_keys = "all",
     generate_keys = TRUE)
library(caret)
data(iris)
set.seed(34)
trainIndex <- createDataPartition(iris$Species,
             times = 1, p = 0.8, list = FALSE)
X_train <- iris[trainIndex, 1:4]
X_test <- iris[-trainIndex, 1:4]
y_train <- as.integer(iris[trainIndex, 5]) - 1
y_test <- as.integer(iris[-trainIndex, 5]) - 1
classes <- c(0, 1, 2)
num_feature <- ncol(X_train)
batch_size <- 128
unit_shape <- (as.integer(c(batch_size,
             floor(py_to_r(context$num_slots) / batch_size))))
train_data <- encode_train_data(context,
                                X_train,
                                y_train,
                                unit_shape,
                                dtype = "classification",
                                path = "./train_data")
model <- hml_logit(
             context,
             unit_shape,
             classes,
             path = model_path)
model %>% fit(train_data)
}