Commit

lintr
strengejacke committed Jan 31, 2024
1 parent 89de1d5 commit 960ae8b
Showing 2 changed files with 39 additions and 41 deletions.
2 changes: 1 addition & 1 deletion DESCRIPTION
@@ -1,7 +1,7 @@
 Type: Package
 Package: bayestestR
 Title: Understand and Describe Bayesian Models and Posterior Distributions
-Version: 0.13.1.7
+Version: 0.13.1.8
 Authors@R:
     c(person(given = "Dominique",
              family = "Makowski",
78 changes: 38 additions & 40 deletions R/rope_range.R
@@ -83,10 +83,10 @@ rope_range.default <- function(x, verbose = TRUE, ...) {
     ret <- Map(
       function(i, j, ...) .rope_range(x, i, j), information, response, response_transform, verbose
     )
-    return(ret)
   } else {
-    .rope_range(x, information, response, response_transform, verbose)
+    ret <- .rope_range(x, information, response, response_transform, verbose)
   }
+  ret
 }
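
The hunk above replaces an explicit return() and a bare branch expression with an assignment to ret in both branches, so that ret is the function's last expression and its value is returned implicitly, the style presumably flagged by lintr's return_linter. A minimal generic sketch of that pattern (illustrative names only, not taken from the package):

# assign in every branch, then let the object be the function's last expression
f <- function(x) {
  if (is.list(x)) {
    ret <- lapply(x, mean)
  } else {
    ret <- mean(x)
  }
  ret
}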


@@ -114,44 +114,42 @@ rope_range.mlm <- function(x, verbose = TRUE, ...) {

 .rope_range <- function(x, information = NULL, response = NULL, response_transform = NULL, verbose = TRUE) {
   negligible_value <- tryCatch(
-    {
-      if (!is.null(response_transform) && grepl("log", response_transform, fixed = TRUE)) {
-        # for log-transform, we assume that a 1% change represents the ROPE adequately
-        # see https://github.com/easystats/bayestestR/issues/487
-        0.01
-      } else if (information$is_linear && information$link_function == "log") {
-        # for log-transform, we assume that a 1% change represents the ROPE adequately
-        # see https://github.com/easystats/bayestestR/issues/487
-        0.01
-      } else if (information$family == "lognormal") {
-        # for log-transform, we assume that a 1% change represents the ROPE adequately
-        # see https://github.com/easystats/bayestestR/issues/487
-        0.01
-      } else if (!is.null(response) && information$link_function == "identity") {
-        # Linear Models
-        0.1 * stats::sd(response, na.rm = TRUE)
-        # 0.1 * stats::sigma(x) # https://github.com/easystats/bayestestR/issues/364
-      } else if (information$is_logit) {
-        # Logistic Models (any)
-        # Sigma==pi / sqrt(3)
-        0.1 * pi / sqrt(3)
-      } else if (information$is_probit) {
-        # Probit models
-        # Sigma==1
-        0.1 * 1
-      } else if (information$is_correlation) {
-        # Correlations
-        # https://github.com/easystats/bayestestR/issues/121
-        0.05
-      } else if (information$is_count) {
-        # Not sure about this
-        sig <- stats::sigma(x)
-        if (is.null(sig) || length(sig) == 0 || is.na(sig)) stop()
-        0.1 * sig
-      } else {
-        # Default
-        stop()
-      }
+    if (!is.null(response_transform) && grepl("log", response_transform, fixed = TRUE)) {
+      # for log-transform, we assume that a 1% change represents the ROPE adequately
+      # see https://github.com/easystats/bayestestR/issues/487
+      0.01
+    } else if (information$is_linear && information$link_function == "log") {
+      # for log-transform, we assume that a 1% change represents the ROPE adequately
+      # see https://github.com/easystats/bayestestR/issues/487
+      0.01
+    } else if (information$family == "lognormal") {
+      # for log-transform, we assume that a 1% change represents the ROPE adequately
+      # see https://github.com/easystats/bayestestR/issues/487
+      0.01
+    } else if (!is.null(response) && information$link_function == "identity") {
+      # Linear Models
+      0.1 * stats::sd(response, na.rm = TRUE)
+      # 0.1 * stats::sigma(x) # https://github.com/easystats/bayestestR/issues/364
+    } else if (information$is_logit) {
+      # Logistic Models (any)
+      # Sigma==pi / sqrt(3)
+      0.1 * pi / sqrt(3)
+    } else if (information$is_probit) {
+      # Probit models
+      # Sigma==1
+      0.1 * 1
+    } else if (information$is_correlation) {
+      # Correlations
+      # https://github.com/easystats/bayestestR/issues/121
+      0.05
+    } else if (information$is_count) {
+      # Not sure about this
+      sig <- stats::sigma(x)
+      if (is.null(sig) || length(sig) == 0 || is.na(sig)) stop(call. = FALSE)
+      0.1 * sig
+    } else {
+      # Default
+      stop(call. = FALSE)
     },
     error = function(e) {
       if (isTRUE(verbose)) {
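
For orientation, the defaults computed in .rope_range() above can be made concrete with a little arithmetic. The sketch below is illustrative only and not part of the commit; it assumes the symmetric interval c(-negligible_value, negligible_value) is assembled by the calling rope_range() code, which lies outside this hunk.

# worked examples of the negligible_value defaults (assumptions noted above)
0.1 * pi / sqrt(3)                # logistic models: ~0.181 (SD of the standard logistic is pi / sqrt(3))
0.1 * 1                           # probit models: latent SD of 1, so 0.1
0.05                              # correlations
0.01                              # log link, log-transformed response, or lognormal family
y <- mtcars$mpg                   # hypothetical response for a linear model
0.1 * stats::sd(y, na.rm = TRUE)  # linear models: one tenth of the response SD (~0.60 here)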
