
Commit bf3a03b
good practice
check good practice and continuous integration badge
giovsaraceno committed Sep 7, 2024
1 parent badf613 commit bf3a03b
Showing 8 changed files with 51 additions and 64 deletions.
48 changes: 24 additions & 24 deletions R/clustering_functions.R
@@ -128,11 +128,11 @@
  #' @srrstats {UL1.4} assumptions are taken into consideration
  #'
  #' @export
- setGeneric("pkbc",function(dat,
- nClust=NULL,
+ setGeneric("pkbc",function(dat,
+ nClust = NULL,
  maxIter = 300,
- stoppingRule = 'loglik',
- initMethod = 'sampleData',
+ stoppingRule = "loglik",
+ initMethod = "sampleData",
  numInit = 10){

  standardGeneric("pkbc")
@@ -154,20 +154,20 @@ setGeneric("pkbc",function(dat,
  #' @export
  setMethod("pkbc", signature(dat = "ANY"),
  function(dat,
- nClust=NULL,
+ nClust = NULL,
  maxIter = 300,
- stoppingRule = 'loglik',
- initMethod = 'sampleData',
+ stoppingRule = "loglik",
+ initMethod = "sampleData",
  numInit = 10){
  # Constant defining threshold by which log likelihood must change
  # to continue iterations, if applicable.
  LL_TOL <- 1e-7

  # validate input
- if(is.null(nClust)){
+ if(is.null(nClust)) {
  stop("Input parameter nClust is required. Provide one specific
  value or a set of possible values.")
- } else if(is.vector(nClust) & is.numeric(nClust)){
+ } else if(is.vector(nClust) & is.numeric(nClust)) {
  if(any(nClust < 1)){
  stop('Values in the input parameter nClust must be
  greater than 0')
@@ -177,34 +177,34 @@ setMethod("pkbc", signature(dat = "ANY"),
  values")
  }
  if (maxIter < 1) {
- stop('Input parameter maxIter must be greater than 0')
+ stop("Input parameter maxIter must be greater than 0")
  }
- if ( !(stoppingRule %in% c('max', 'membership', 'loglik')) ) {
- stop(paste('Unrecognized value "', stoppingRule, '" in input
- parameter stoppingRule.', sep=''))
+ if ( !(stoppingRule %in% c("max", "membership", "loglik")) ) {
+ stop(paste("Unrecognized value ", stoppingRule, " in input
+ parameter stoppingRule.", sep=''))
  }
- if ( !(initMethod %in% c('sampleData')) ) {
- stop(paste('Unrecognized value "', initMethod, '" in input
- parameter initMethod.', sep=''))
+ if (!(initMethod %in% c("sampleData"))) {
+ stop(paste("Unrecognized value ", initMethod, " in input
+ parameter initMethod.", sep=''))
  }
  if (numInit < 1) {
- stop('Input parameter numInit must be greater than 0')
+ stop("Input parameter numInit must be greater than 0")
  }

  # set options for stopping rule
- checkMembership <- stoppingRule == 'membership'
- checkLoglik <- stoppingRule == 'loglik'
+ checkMembership <- stoppingRule == "membership"
+ checkLoglik <- stoppingRule == "loglik"

  if(is.data.frame(dat)){
  dat <- as.matrix(dat)
  } else if(!is.matrix(dat)){
  stop("dat must be a matrix or a data.frame")
  }
- if(!is.numeric(dat)){
+ if(!is.numeric(dat)) {
  stop("dat must be a numeric matrix or data.frame")
  }

- if(any(is.na(dat))){
+ if(any(is.na(dat))) {
  stop("There are missing values in the data set!")
  } else if(any(is.infinite(dat) |is.nan(dat))){
  stop("There are undefined values, that is Nan, Inf, -Inf")
@@ -224,12 +224,12 @@ setMethod("pkbc", signature(dat = "ANY"),
  alpha_best <- rep(-99, numClust)
  rho_best <- rep(-99, numClust)
  mu_best <- matrix(nrow = numClust, ncol = numVar)
- normprobMat_best <- matrix(-99,nrow=numData, ncol=numClust)
- if (initMethod == 'sampleData') {
+ normprobMat_best <- matrix(-99, nrow=numData, ncol=numClust)
+ if (initMethod == "sampleData") {
  uniqueData <- unique(dat)
  numUniqueObs <- nrow(uniqueData)
  if (numUniqueObs < numClust) {
- stop(paste('Only', numUniqueObs, 'unique observations.',
+ stop(paste("Only ", numUniqueObs, " unique observations.",
  'When initMethod = "sampleData", must have more
  than numClust unique observations.'
  ))
7 changes: 4 additions & 3 deletions R/critical_value.R
@@ -67,7 +67,8 @@ compute_CV<-function(B, Quantile, data_pool, size_x, size_y, h, method, b=1,
  "Nonparam", compute_variance)[1:2]
  }

- cv_res <- apply(Results,2,function(x) as.numeric(quantile(x,Quantile,na.rm=T)))
+ cv_res <- apply(Results,2,
+ function(x) as.numeric(quantile(x,Quantile,na.rm=T)))
  return(list(cv=cv_res))
  }
  #'
@@ -87,7 +88,7 @@ compute_CV<-function(B, Quantile, data_pool, size_x, size_y, h, method, b=1,
  #'
  #' @details
  #' For each replication, a sample of d-dimensional observations from the uniform
- #' #' distribution on the Sphere are generated and the Poisson kernel-based
+ #' distribution on the Sphere are generated and the Poisson kernel-based
  #' U-statistic is computed. After B iterations, the critical value is selected
  #' as the \code{Quantile} of the empirical distribution of the computed test
  #' statistics.
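The @details text above describes a Monte Carlo scheme: draw B samples uniformly on the d-dimensional sphere, compute the Poisson kernel-based U-statistic for each, and take the empirical Quantile as the critical value. A minimal R sketch of that idea, outside the package internals; poisson_kernel_stat is a hypothetical placeholder, not an exported QuadratiK function:

# Hedged sketch of the critical-value procedure described above.
# `poisson_kernel_stat` is a hypothetical placeholder for the Poisson
# kernel-based U-statistic; it is NOT an exported QuadratiK function.
simulate_cv <- function(B, n, d, Quantile, poisson_kernel_stat, ...) {
  stats <- vapply(seq_len(B), function(b) {
    z <- matrix(rnorm(n * d), nrow = n, ncol = d)
    u <- z / sqrt(rowSums(z^2))   # uniform sample on the d-dimensional sphere
    poisson_kernel_stat(u, ...)   # statistic under the null of uniformity
  }, numeric(1))
  # critical value: empirical Quantile of the B simulated statistics
  as.numeric(quantile(stats, Quantile, na.rm = TRUE))
}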
Expand Down Expand Up @@ -208,7 +209,7 @@ cv_ksample <- function(x, y, h, B=150, b=0.9, Quantile =0.95,
if(method=="bootstrap"){

ind_k <- unlist(lapply(1:K, function(k) sample(1:sizes[k], sizes[k],
replace=T) + cum_size[k]))
replace=T) + cum_size[k]))
ind_k <- sample(ind_k,length(ind_k),replace = F)

} else if(method=="permutation"){
5 changes: 3 additions & 2 deletions R/h_selection.R
@@ -51,8 +51,9 @@
  #' for the chosen values of \code{h_values} and \code{delta}.
  #'
  #' We consider target alternatives \eqn{F_\delta(\hat{\mathbf{\mu}},
- #' \hat{\mathbf{\Sigma}}, \hat{\mathbf{\lambda}})}, where \eqn{\hat{\mathbf{\mu}},
- #' \hat{\mathbf{\Sigma}}} and \eqn{\hat{\mathbf{\lambda}}} indicate the location,
+ #' \hat{\mathbf{\Sigma}}, \hat{\mathbf{\lambda}})}, where
+ #' \eqn{\hat{\mathbf{\mu}}, \hat{\mathbf{\Sigma}}} and
+ #' \eqn{\hat{\mathbf{\lambda}}} indicate the location,
  #' covariance and skewness parameter estimates from the pooled sample.
  #' - Compute the estimates of mean \eqn{\hat{\mu}}, covariance matrix
  #' \eqn{\hat{\Sigma}} and skewness \eqn{\hat{\lambda}} from the pooled sample.
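For illustration, a hedged sketch of the pooled-sample estimates referred to in the documentation above; x and y are assumed to be numeric matrices with the same number of columns, and the marginal moment-based skewness is only a stand-in for the package's estimator of the skewness parameter:

# Hedged sketch: pooled-sample estimates of location, covariance and skewness.
# The marginal skewness coefficient below is only an illustrative stand-in,
# not the estimator used internally by QuadratiK.
set.seed(1)
x <- matrix(rnorm(100 * 3), ncol = 3)            # first sample (assumed)
y <- matrix(rnorm(100 * 3, mean = 0.3), ncol = 3) # second sample (assumed)
pooled     <- rbind(x, y)                         # pooled sample
mu_hat     <- colMeans(pooled)                    # location estimate
Sigma_hat  <- stats::cov(pooled)                  # covariance estimate
lambda_hat <- apply(pooled, 2,
                    function(v) mean((v - mean(v))^3) / stats::sd(v)^3)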
34 changes: 17 additions & 17 deletions R/kb.test.R
@@ -64,10 +64,10 @@
  #' tuning parameter \eqn{h}. \cr
  #' \itemize{
  #' \item **Test for Normality**: \cr
- #' Let \eqn{x_1, x_2, ..., x_n} be a random sample with empirical distribution
- #' function \eqn{\hat F}. To test the null hypothesis of normality, i.e.
- #' \eqn{H_0:F=\mathcal{N}_d(\mu, \Sigma)}, we consider the U-statistic
- #' estimate and the V-statistic estimate of the sample KBQD.
+ #' Let \eqn{x_1, x_2, ..., x_n} be a random sample with empirical
+ #' distribution function \eqn{\hat F}. To test the null hypothesis of
+ #' normality, i.e. \eqn{H_0:F=\mathcal{N}_d(\mu, \Sigma)}, we consider the
+ #' U-statistic estimate and the V-statistic estimate of the sample KBQD.
  #'
  #' \item **Two-sample test**: \cr
  #' Let \eqn{x_1, x_2, ..., x_{n_1} \sim F} and
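A hedged usage sketch of the two tests documented above; the argument names x, y and h mirror those used elsewhere in this diff (e.g. cv_ksample), but the exact kb.test() signature should be checked against the package help page:

# Hedged usage sketch of the normality and two-sample tests described above.
# Argument names (x, y, h) are assumed from this diff, not verified against
# the kb.test() documentation.
library(QuadratiK)
set.seed(123)
x1 <- matrix(rnorm(200 * 2), ncol = 2)               # d = 2 sample
x2 <- matrix(rnorm(200 * 2, mean = 0.5), ncol = 2)   # shifted sample

normality_test  <- kb.test(x = x1, h = 0.5)          # H0: F = N_d(mu, Sigma)
two_sample_test <- kb.test(x = x1, y = x2, h = 0.5)  # H0: F1 = F2
summary(normality_test)
summary(two_sample_test)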
@@ -573,19 +573,19 @@ setMethod("summary", "kb.test", function(object) {

  stats[length(stats) +1] <- stats_step

- # pl_stat <- ggplot() +
- # geom_table_npc(data = data.frame(Stat = rownames(stats_step), stats_step),
- # aes(npcx = 0.5, npcy = 0.5,
- # label = list(data.frame(Stat = rownames(stats_step), stats_step))),
- # hjust = 0.5, vjust = 0.5) +
- # # annotate('table', x = 0.5, y = 0.5,
- # # label = data.frame(Stat = rownames(stats_step),stats_step),
- # # hjust = 0.5, vjust = 0.5) +
- # theme_void() +
- # ggtitle("")+
- # scale_color_brewer(palette='Set1')
- #
- #plot_list[[length(plot_list) + 1]] <- list(pl,pl_stat)
+ # pl_stat <- ggplot() +
+ # geom_table_npc(data = data.frame(Stat = rownames(stats_step), stats_step),
+ # aes(npcx = 0.5, npcy = 0.5,
+ # label = list(data.frame(Stat = rownames(stats_step), stats_step))),
+ # hjust = 0.5, vjust = 0.5) +
+ # # annotate('table', x = 0.5, y = 0.5,
+ # # label = data.frame(Stat = rownames(stats_step),stats_step),
+ # # hjust = 0.5, vjust = 0.5) +
+ # theme_void() +
+ # ggtitle("")+
+ # scale_color_brewer(palette='Set1')
+ #
+ #plot_list[[length(plot_list) + 1]] <- list(pl,pl_stat)
  plot_list[[length(plot_list) + 1]] <- list(pl)

  }
12 changes: 0 additions & 12 deletions R/pk.test.R
@@ -257,18 +257,6 @@ setMethod("summary", "pk.test", function(object) {

  stats[[i]] <- stats_step

- # pl_stat <- ggplot() +
- # geom_table_npc(data = data.frame(Stat = rownames(stats_step), stats_step),
- # aes(npcx = 0.5, npcy = 0.5,
- # label = list(data.frame(Stat = rownames(stats_step), stats_step))),
- # hjust = 0.5, vjust = 0.5) +
- # #annotate('table', x = 0.5, y = 0.5,
- # # label = data.frame(Stat = rownames(stats_step),stats_step),
- # # hjust = 0.5, vjust = 0.5) +
- # theme_void() +
- # ggtitle("")+
- # scale_color_brewer(palette='Set1')
- #
  plot_list[[length(plot_list) + 1]] <- list(pl)


3 changes: 2 additions & 1 deletion R/pkbd_functions.R
@@ -370,7 +370,8 @@ rejpsaw <- function(n, rho, mu, p){
  if(p==3) ib <- c(t1+0.000001,t2-0.000001)

  ## Create generator object for pSaw distribution
- pSaw <- Tinflex::Tinflex.setup.C(lpdf, dlpdf, d2lpdf, ib=ib,cT=1, rho=1.05)
+ pSaw <- Tinflex::Tinflex.setup.C(lpdf, dlpdf, d2lpdf,
+ ib=ib,cT=1, rho=1.05)
  ## Print data about generator object.
  #print(gen)

4 changes: 0 additions & 4 deletions R/utility.R
@@ -84,10 +84,6 @@ var_norm <- function(Sigma_h, V, n){

  res <- 2/(n*(n-1)) * 1/(2*pi)^(d) * res

- # h2 <- h^2
- # h_const <- ((h2+2)/(h2^2*(h2+4)))^(d/2) -2/((h2+1)*(h2+3))^(d/2) + (h2+2)^(-d)
- # res <- 2/(n*(n-1)) * 1/(2*pi)^(d) * h_const
- #
  return(res)
  }
  #'
2 changes: 1 addition & 1 deletion README.md
@@ -1,6 +1,6 @@
  [![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) [![CRAN_Status_Badge](http://www.r-pkg.org/badges/version/QuadratiK)](https://cran.r-project.org/package=QuadratiK)

- [![Status at rOpenSci Software Peer Review](https://badges.ropensci.org/632_status.svg)](https://github.com/ropensci/software-review/issues/632)
+ [![R-CMD-check](https://github.com/giovsaraceno/QuadratiK-package/actions/workflows/R-CMD-check.yaml/badge.svg)](https://github.com/giovsaraceno/QuadratiK-package/actions/workflows/R-CMD-check.yaml)

  # Collection of Methods Constructed using the Kernel-Based Quadratic Distances

