Hi @alexmosc
Please find a small example that trains on random multidimensional data. I
trained a small RNN for 5 epochs. This is just an example with random values;
I did not check accuracy — it is only a demonstration of how training could
be done.
library("readr")
library("dplyr")
library("plotly")
library("mxnet")
# Generate random multidimensional toy data: `samples` sine-wave series of
# length `seq_len`, later reshaped into 64 samples of a 3-dimensional series.
# NOTE(review): `seq_len` shadows base::seq_len(); a different name (e.g.
# series_len) would be safer, but it is kept so the rest of the script works.
seq_len <- 100
samples <- 192
# One random phase per series, drawn uniformly from (-pi, pi).
seeds <- runif(samples, min = -pi, max = pi)
# Each column is sin(phase + k * pi/12) for k = 0..seq_len, i.e. seq_len + 1
# points per series; only the first seq_len points are kept below.
pts <- sapply(seeds, function(x) sin(x + pi/12 * (0:seq_len)))
x <- pts[1:seq_len, ]
# Reshape the seq_len x samples matrix into (channels, time, samples),
# here (3, 100, 64).
x <- array(x, dim = c(3, seq_len, samples / 3))
# Random continuous labels in [0, 1]: 32 for training, 32 for evaluation.
y1 <- runif(32, 0.0, 1.0)
y2 <- runif(32, 0.0, 1.0)
batch.size <- 8
(I have assumed a 3-dimensional time series of length `seq_len = 100`, with
64 samples.)
# Train on the first 32 samples, evaluate on the remaining 32.
# drop = FALSE (never the reassignable shorthand F) keeps the 3-D
# (channels, time, samples) shape of the slice that mx.io.arrayiter expects.
train.data <- mx.io.arrayiter(data = x[, , 1:32, drop = FALSE], label = y1,
                              batch.size = batch.size, shuffle = FALSE)
eval.data <- mx.io.arrayiter(data = x[, , -(1:32), drop = FALSE], label = y2,
                             batch.size = batch.size, shuffle = FALSE)
# Training configuration: device, weight initializer, optimizer, and
# metric-logging callbacks.
# NOTE(review): mx.gpu() requires a CUDA-enabled mxnet build; use mx.cpu()
# otherwise.
ctx <- mx.gpu()
# Xavier initialization, gaussian variant, magnitude 3.
initializer <- mx.init.Xavier(rnd_type = "gaussian",
factor_type = "avg",
magnitude = 3)
# AdaDelta with light weight decay; gradients are clipped at 1 and rescaled
# by 1/batch.size so updates are averaged over the batch.
optimizer <- mx.opt.create("adadelta", rho = 0.9, eps = 1e-5, wd = 1e-6,
clip_gradient = 1, rescale.grad = 1/batch.size)
# Collects per-epoch train/eval metric values for later inspection.
logger <- mx.metric.logger()
epoch.end.callback <- mx.callback.log.train.metric(period = 1, logger = logger)
# Logs the training metric every 50 batches (defined here but see the
# training call below for whether it is actually passed in).
batch.end.callback <- mx.callback.log.train.metric(period = 50)
# Build a single-layer LSTM, sequence-to-one graph with hidden size 4 and a
# one-unit decoder. num_embed/input_size are NULL because the input is
# already a dense numeric series (no embedding lookup needed).
# NOTE(review): loss_output = "softmax" with num_decode = 1 and continuous
# labels in [0, 1] looks odd for what is effectively regression — a "linear"
# (L2) loss output would presumably fit better; confirm against the
# rnn.graph documentation.
symbol<- rnn.graph(config = "seq-to-one", cell_type = "lstm",
num_rnn_layer = 1, num_embed = NULL, num_hidden =
4,
num_decode = 1, input_size = NULL, dropout = 0.5,
ignore_label = -1, loss_output = "softmax",
output_last_state = F, masking = T)
# Train for 5 epochs, timing the run. The batch.end.callback created above
# was previously unused (NULL was passed); it is wired in here so the
# training metric is also logged every 50 batches.
# NOTE(review): mx.metric.accuracy with continuous labels in [0, 1] will not
# be meaningful; a regression metric (e.g. mx.metric.rmse) would fit this
# demo better — kept as-is since accuracy was explicitly not being checked.
system.time(
  model <- mx.model.buckets(symbol = symbol,
                            train.data = train.data,
                            eval.data = eval.data,
                            num.round = 5, ctx = ctx, verbose = TRUE,
                            metric = mx.metric.accuracy,
                            initializer = initializer, optimizer = optimizer,
                            batch.end.callback = batch.end.callback,
                            epoch.end.callback = epoch.end.callback)
)
Please let me know if this helps your use case.
[ Full content available at:
https://github.com/apache/incubator-mxnet/issues/12002 ]
This message was relayed via gitbox.apache.org for [email protected]