From b20af8f68d3e3ebab753c0da858d2a9cad16a64a Mon Sep 17 00:00:00 2001
From: saviola
Date: Sun, 6 Dec 2015 11:31:03 +0100
Subject: [PATCH] Add DOIs in description, fix utils / stats imports

- add DOIs for references in the description
- import stats package
- prefix utils function with utils::
---
 DESCRIPTION   | 21 ++++++++++++---------
 NAMESPACE     |  1 +
 R/interface.R |  4 ++--
 R/mnist.R     |  2 +-
 4 files changed, 16 insertions(+), 12 deletions(-)

diff --git a/DESCRIPTION b/DESCRIPTION
index 2a23e26..9998f07 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -14,22 +14,25 @@ Description: The darch package is built on the basis of the code from G. E.
     nets). This package is for generating neural networks with many layers (deep
     architectures) and train them with the method introduced by the publications
     "A fast learning algorithm for deep belief nets" (G. E. Hinton, S. Osindero,
-    Y. W. Teh) and "Reducing the dimensionality of data with neural networks"
-    (G. E. Hinton, R. R. Salakhutdinov). This method includes a pre training
-    with the contrastive divergence method published by G.E Hinton (2002) and a
-    fine tuning with common known training algorithms like backpropagation or
-    conjugate gradients. Additionally, supervised fine-tuning can be enhanced
-    with maxout and dropout, two recently developed techniques to improve
-    fine-tuning for deep learning.
+    Y. W. Teh; doi: 10.1162/neco.2006.18.7.1527) and "Reducing the
+    dimensionality of data with neural networks" (G. E. Hinton, R. R.
+    Salakhutdinov; doi: 10.1126/science.1127647). This method includes a pre
+    training with the contrastive divergence method published by G.E Hinton
+    (2002; doi: 10.1162/089976602760128018) and a fine tuning with common known
+    training algorithms like backpropagation or conjugate gradients.
+    Additionally, supervised fine-tuning can be enhanced with maxout and
+    dropout, two recently developed techniques to improve fine-tuning for deep
+    learning.
 License: GPL (>= 2) | file LICENSE
 URL: https://github.com/maddin79/darch
 BugReports: https://github.com/maddin79/darch/issues
 Depends:
     R (>= 3.0.0)
 Imports:
+    stats,
+    methods,
     futile.logger (>= 1.4.1),
-    ff (>= 2.2-13),
-    methods
+    ff (>= 2.2-13)
 Suggests:
     gputools
 Collate:
diff --git a/NAMESPACE b/NAMESPACE
index 01aeb43..96788db 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -186,3 +186,4 @@ exportMethods(validateDataSet)
 import(ff)
 import(futile.logger)
 import(methods)
+import(stats)
diff --git a/R/interface.R b/R/interface.R
index a5368ad..ecdf968 100644
--- a/R/interface.R
+++ b/R/interface.R
@@ -49,7 +49,7 @@ assign("matMult", `%*%`, darch.env)
 #' 0.10.0\cr Date: \tab 2015-11-12\cr License: \tab GPL-2 or later\cr
 #' LazyLoad: \tab yes\cr }
 #'
-#' @import ff futile.logger methods
+#' @import ff futile.logger methods stats
 #'
 #' @author Martin Drees \email{mdrees@@stud.fh-dortmund.de} and contributors.
 #' @keywords package Neural Networks darch Deep-Belief-Networks Restricted
@@ -511,7 +511,7 @@ print.DArch <- function(x, ...)
   needleBody <- body(needle)
   needleBodyLength <- length(needleBody)
 
-  for (functionName in lsf.str("package:darch"))
+  for (functionName in utils::lsf.str("package:darch"))
   {
     functionBody <- body(functionName)
 
diff --git a/R/mnist.R b/R/mnist.R
index df3c5b7..8bb83aa 100644
--- a/R/mnist.R
+++ b/R/mnist.R
@@ -184,7 +184,7 @@ provideMNIST <- function (folder="data/", download=F)
                      fileNameTestImages, fileNameTestLabels))
   {
     statusCodes <- c(statusCodes,
-      download.file(paste0(mnistUrl, file),
+      utils::download.file(paste0(mnistUrl, file),
       paste0(folder, file)))
   }
 
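
For context, the namespace changes above follow the usual R packaging rule: functions from the recommended packages (stats, utils, methods, ...) must either be imported via NAMESPACE or called with an explicit pkg:: prefix, otherwise R CMD check flags them as undefined globals. Below is a minimal sketch of the two styles this patch uses; the helper names fetchFile and randomWeights are illustrative only and are not part of darch.

    # Style 1: explicit namespace prefix, as applied to download.file() and
    # lsf.str() in this patch. No NAMESPACE entry for utils is required.
    fetchFile <- function(url, destination)
    {
      # utils::download.file() returns 0 on success
      status <- utils::download.file(url, destination)
      invisible(status == 0)
    }

    # Style 2: rely on an import() directive -- here the new import(stats)
    # entry in NAMESPACE, generated from the @import roxygen tag -- so that
    # stats functions such as rnorm() resolve without a prefix in package code.
    randomWeights <- function(rows, cols)
    {
      matrix(rnorm(rows * cols, sd = 0.01), nrow = rows)
    }

Presumably the patch mixes both styles because utils is only needed in two places (so a local utils:: prefix is the lighter change), while stats functions appear throughout the package, making a single import(stats) more convenient.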