forked from tlverse/sl3
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathLrnr_bilstm.Rd
71 lines (61 loc) · 2.45 KB
/
Lrnr_bilstm.Rd
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/Lrnr_bilstm.R
\docType{class}
\name{Lrnr_bilstm}
\alias{Lrnr_bilstm}
\title{Bidirectional Long Short-Term Memory (LSTM) Recurrent Neural Network}
\format{\code{\link{R6Class}} object.}
\usage{
Lrnr_bilstm
}
\value{
\code{\link{Lrnr_base}} object with methods for training and prediction
}
\description{
This learner supports the bidirectional long short-term memory (LSTM) recurrent
neural network algorithm. In order to use this learner, you will need the keras
Python module, version 2.0.0 or higher. Note that all preprocessing, such as
differencing and seasonal effects for time series, should be addressed before
using this learner.
}
\section{Fields}{
\describe{
\item{\code{units}}{Positive integer, dimensionality of the output space.}
\item{\code{loss}}{Name of the loss function used.}
\item{\code{optimizer}}{Name of the optimizer, or an optimizer object.}
\item{\code{batch_size}}{Number of samples per gradient update.}
\item{\code{epochs}}{Number of epochs to train the model.}
\item{\code{window}}{Size of the sliding window input.}
\item{\code{activation}}{The activation function to use.}
\item{\code{dense}}{Regular, densely-connected NN layer. Default is 1.}
\item{\code{dropout}}{Float between 0 and 1. Fraction of the input units to drop.}
}}
\seealso{
Other Learners: \code{\link{Custom_chain}},
\code{\link{Lrnr_HarmonicReg}}, \code{\link{Lrnr_arima}},
\code{\link{Lrnr_bartMachine}}, \code{\link{Lrnr_base}},
\code{\link{Lrnr_condensier}}, \code{\link{Lrnr_cv}},
\code{\link{Lrnr_dbarts}},
\code{\link{Lrnr_define_interactions}},
\code{\link{Lrnr_expSmooth}},
\code{\link{Lrnr_glm_fast}}, \code{\link{Lrnr_glmnet}},
\code{\link{Lrnr_glm}}, \code{\link{Lrnr_grf}},
\code{\link{Lrnr_h2o_grid}}, \code{\link{Lrnr_hal9001}},
\code{\link{Lrnr_independent_binomial}},
\code{\link{Lrnr_lstm}}, \code{\link{Lrnr_mean}},
\code{\link{Lrnr_nnls}}, \code{\link{Lrnr_optim}},
\code{\link{Lrnr_pca}},
\code{\link{Lrnr_pkg_SuperLearner}},
\code{\link{Lrnr_randomForest}},
\code{\link{Lrnr_ranger}}, \code{\link{Lrnr_rpart}},
\code{\link{Lrnr_rugarch}}, \code{\link{Lrnr_sl}},
\code{\link{Lrnr_solnp_density}},
\code{\link{Lrnr_solnp}},
\code{\link{Lrnr_subset_covariates}},
\code{\link{Lrnr_svm}}, \code{\link{Lrnr_tsDyn}},
\code{\link{Lrnr_xgboost}}, \code{\link{Pipeline}},
\code{\link{Stack}}, \code{\link{define_h2o_X}},
\code{\link{undocumented_learner}}
}
\concept{Learners}
\keyword{data}