LSBoost classifier

Usage

LSBoostClassifier(
  n_estimators = 100L,
  learning_rate = 0.1,
  n_hidden_features = 5L,
  reg_lambda = 0.1,
  row_sample = 1,
  col_sample = 1,
  dropout = 0,
  tolerance = 1e-04,
  direct_link = 1L,
  verbose = 1L,
  seed = 123L,
  solver = c("ridge", "lasso"),
  activation = "relu",
  n_clusters = 0,
  clustering_method = "kmeans",
  cluster_scaling = "standard",
  degree = 0,
  weights_distr = "uniform"
)

Arguments

n_estimators:

int, number of boosting iterations.

learning_rate:

float, controls the learning speed at training time.

n_hidden_features:

int, number of nodes in successive hidden layers.

reg_lambda:

float, L2 regularization parameter for successive errors in the optimizer (at training time).

row_sample:

float, percentage of rows chosen from the training set.

col_sample:

float, percentage of columns chosen from the training set.

dropout:

float, percentage of hidden layer nodes dropped at training time.

tolerance:

float, controls early stopping in gradient descent (at training time).

direct_link:

bool, indicates whether the original features are included in the model's fitting (True) or not (False).

verbose:

int, controls verbosity: 1 displays a progress bar, 0 does not (currently the only levels).

seed:

int, reproducibility seed for the simulation of the hidden layer's weights, clustering and dropout.

solver:

str, type of 'weak' learner; currently one of 'ridge', 'lasso'.

activation:

str, activation function: currently 'relu', 'relu6', 'sigmoid', 'tanh'.

n_clusters:

int, number of clusters for clustering the input features (0 means no clustering).

clustering_method:

str, clustering method: currently 'kmeans', 'gmm' (Gaussian Mixture Model).

cluster_scaling:

str, scaling method for clustering: currently 'standard', 'minmax', 'robust'.

degree:

int, degree of polynomial interaction features.

weights_distr:

str, distribution of the hidden layer weights: currently 'uniform', 'gaussian'.
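
As an illustration of how the arguments above fit together, here is a minimal construction sketch; the hyperparameter values are placeholders chosen for illustration, not tuned defaults.

# a minimal sketch: non-default weak learner, activation and clustering
# (placeholder values, not recommendations)
obj <- mlsauce::LSBoostClassifier(
  n_estimators = 50L,
  learning_rate = 0.2,
  solver = "lasso",
  activation = "tanh",
  n_clusters = 2,
  clustering_method = "gmm",
  row_sample = 0.9,
  col_sample = 0.9,
  dropout = 0.1,
  seed = 123L
)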

Value

An object of class LSBoostClassifier

Examples


library(datasets)

X <- as.matrix(iris[, 1:4])
y <- as.integer(iris[, 5]) - 1L

n <- dim(X)[1]
p <- dim(X)[2]
set.seed(21341)
train_index <- sample(x = 1:n, size = floor(0.8*n), replace = FALSE)
test_index <- -train_index
X_train <- as.matrix(X[train_index, ])
y_train <- as.integer(y[train_index])
X_test <- as.matrix(X[test_index, ])
y_test <- as.integer(y[test_index])

if (FALSE) {
obj <- mlsauce::LSBoostClassifier()

print(obj$get_params())

obj$fit(X_train, y_train)

print(obj$score(X_test, y_test))
}
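
The sketch below extends the example, assuming the fitted object also exposes scikit-learn-style predict() and predict_proba() methods via the underlying Python class (only fit() and score() are shown above); the chosen hyperparameter values are illustrative.

if (FALSE) {
# non-default settings; predict()/predict_proba() are assumed to follow
# the scikit-learn-style interface suggested by fit() and score() above
obj2 <- mlsauce::LSBoostClassifier(solver = "lasso",
                                   activation = "tanh",
                                   n_clusters = 2)

obj2$fit(X_train, y_train)

# accuracy on the held-out rows
print(obj2$score(X_test, y_test))

# predicted class labels and class membership probabilities
print(head(obj2$predict(X_test)))
print(head(obj2$predict_proba(X_test)))
}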