Encode training data.

encode_train_data(
  context,
  X,
  y,
  unit_shape,
  dtype = "classification",
  scale_type = "none",
  path = NULL
)

Arguments

context:

(Context) - Context for HE.

X:

(data.frame) - Input features as a 2-dimensional data frame.

y:

(numeric) - Input labels as a 1-dimensional numeric vector.

unit_shape:

(integer vector) - Unit encoding shape of the matrix; a typical construction is sketched after this argument list.

dtype:

(Optional: string, one of "regression" or "classification"; default "classification") - Type of the target.

scale_type:

(Optional: string, default "none") - Type of the scaler.

path:

(Optional: string) - Path of the dataset.

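A typical construction of unit_shape, mirroring the Examples below, packs one batch per encoding unit: the first entry is the batch size and the second is how many blocks of that size fit into the context's slot count. A minimal sketch, assuming a Context object named context already exists and reticulate is attached for py_to_r:

batch_size <- 128
num_slots <- py_to_r(context$num_slots)        # slots available per ciphertext
unit_shape <- as.integer(c(batch_size,         # rows per encoding unit
                           floor(num_slots / batch_size)))  # blocks of batch_size that fit
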
Value

DataSet (heaan_sdk.DataSet) holding the encoded training data.

Examples

if (FALSE) {
library(reticulate)  # provides py_to_r() used below
params <- heaan_sdk.HEParameter("FGb")
key_dir_path <- "./keys"  # placeholder directory for HE keys
context <- heaan_sdk.Context(
     params,
     key_dir_path = key_dir_path,
     load_keys = "all",
     generate_keys = TRUE)
library(caret)
data(iris)
set.seed(34)
trainIndex <- createDataPartition(iris$Species,
             times = 1, p = 0.8, list = FALSE)
X_train <- iris[trainIndex, 1:4]
X_test <- iris[-trainIndex, 1:4]
y_train <- as.integer(iris[trainIndex, 5]) - 1   # 0-based class labels
y_test <- as.integer(iris[-trainIndex, 5]) - 1
classes <- c(0, 1, 2)
num_feature <- ncol(X_train)
batch_size <- 128
unit_shape <- as.integer(c(batch_size,   # rows per encoding unit
             floor(py_to_r(context$num_slots) / batch_size)))   # blocks of batch_size that fit in the slots
train_data <- encode_train_data(context,
                                X_train,
                                y_train,
                                unit_shape,
                                dtype = "classification",
                                path = "./train_data")
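
# A further sketch (not from the original example): encoding a regression
# target with the same context and unit_shape. The data frame, column names,
# and output path below are hypothetical placeholders.
X_reg <- data.frame(x1 = rnorm(100), x2 = rnorm(100))
y_reg <- 2 * X_reg$x1 - X_reg$x2 + rnorm(100, sd = 0.1)
reg_data <- encode_train_data(context,
                              X_reg,
                              y_reg,
                              unit_shape,
                              dtype = "regression",
                              path = "./reg_train_data")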
}