diff --git a/R/commonMachineLearningRegression.R b/R/commonMachineLearningRegression.R
index 661e48dd..764ec77a 100644
--- a/R/commonMachineLearningRegression.R
+++ b/R/commonMachineLearningRegression.R
@@ -612,7 +612,7 @@
   nTest <- result[["ntest"]]
   plotData <- data.frame(y = c(nTrain, nTest), x = c("Train", "Test"), group = c(1, 1))
   p <- ggplot2::ggplot(data = plotData, mapping = ggplot2::aes(x = group, y = y, fill = factor(x, levels = c("Test", "Train")))) +
-    ggplot2::geom_bar(stat = "identity", col = "black", size = 0.5) +
+    ggplot2::geom_bar(stat = "identity", col = "black", linewidth = 0.5) +
     ggplot2::scale_y_continuous(name = NULL, limits = c(0, nTrain + nTest + ((nTrain + nTest) / 5))) + # adjust limits to include "Total" text
     ggplot2::coord_flip() +
     ggplot2::labs(x = NULL) +
@@ -629,7 +629,7 @@
   nTest <- result[["ntest"]]
   plotData <- data.frame(y = c(nTrain, nValid, nTest), x = c("Train", "Validation", "Test"), group = c(1, 1, 1))
   p <- ggplot2::ggplot(data = plotData, mapping = ggplot2::aes(x = group, y = y, fill = factor(x, levels = c("Test", "Validation", "Train")))) +
-    ggplot2::geom_bar(stat = "identity", col = "black", size = 0.5) +
+    ggplot2::geom_bar(stat = "identity", col = "black", linewidth = 0.5) +
     ggplot2::scale_y_continuous(name = NULL, limits = c(0, nTrain + nValid + nTest + ((nTrain + nValid + nTest) / 5))) + # adjust limits to include "Total" text
     ggplot2::coord_flip() +
     ggplot2::labs(x = NULL) +
@@ -651,7 +651,7 @@
   nTest <- result[["ntest"]]
   plotData <- data.frame(y = c(nTrainAndValid, nTest), x = c("Train and validation", "Test"), group = c(1, 1))
   p <- ggplot2::ggplot(data = plotData, mapping = ggplot2::aes(x = group, y = y, fill = factor(x, levels = c("Test", "Train and validation")))) +
-    ggplot2::geom_bar(stat = "identity", col = "black", size = 0.5) +
+    ggplot2::geom_bar(stat = "identity", col = "black", linewidth = 0.5) +
     ggplot2::scale_y_continuous(name = NULL, limits = c(0, nTrainAndValid + nTest + ((nTrainAndValid + nTest) / 5))) + # adjust limits to include "Total" text
     ggplot2::coord_flip() +
     ggplot2::xlab(NULL) +
diff --git a/R/mlRegressionBoosting.R b/R/mlRegressionBoosting.R
index 38e3b459..89db4024 100644
--- a/R/mlRegressionBoosting.R
+++ b/R/mlRegressionBoosting.R
@@ -261,7 +261,7 @@ mlRegressionBoosting <- function(jaspResults, dataset, options, ...) {
       mapping = ggplot2::aes(x = xstart, xend = xend, y = ystart, yend = yend), linetype = "dashed", col = "darkgrey"
     ) +
     jaspGraphs::geom_line(mapping = ggplot2::aes(linetype = type)) +
-    ggplot2::geom_smooth(method = "loess", formula = y ~ x, size = 1, colour = "darkred", se = FALSE) +
+    ggplot2::geom_smooth(method = "loess", formula = y ~ x, linewidth = 1, colour = "darkred", se = FALSE) +
     ggplot2::scale_x_continuous(name = gettext("Number of Trees"), labels = xLabels, breaks = xBreaks, limits = c(0, max(xBreaks))) +
     ggplot2::scale_y_continuous(name = ylab, labels = yLabels, breaks = yBreaks, limits = range(yBreaks)) +
     ggplot2::labs(linetype = NULL) +
diff --git a/R/mlRegressionDecisionTree.R b/R/mlRegressionDecisionTree.R
index 31d10b45..dfaf4bf9 100644
--- a/R/mlRegressionDecisionTree.R
+++ b/R/mlRegressionDecisionTree.R
@@ -318,7 +318,7 @@ mlRegressionDecisionTree <- function(jaspResults, dataset, options, state = NULL
     }
   }
   p <- p + ggparty::geom_edge() +
-    ggparty::geom_edge_label(fill = "white", col = "darkred") +
+    ggparty::geom_edge_label(fill = "white", col = "darkred", linewidth = 0) +
     ggparty::geom_node_splitvar(mapping = ggplot2::aes(size = max(3, nodesize) / 2, label = info), fill = "white", col = "black") +
     ggparty::geom_node_label(mapping = ggplot2::aes(label = info, size = max(3, nodesize) / 2), ids = "terminal", fill = cols, col = "black", alpha = alpha) +
     ggplot2::scale_x_continuous(name = NULL, limits = c(min(p$data$x) - abs(0.1 * min(p$data$x)), max(p$data$x) * 1.1)) +
diff --git a/R/mlRegressionKnn.R b/R/mlRegressionKnn.R
index 32828523..95111bcd 100644
--- a/R/mlRegressionKnn.R
+++ b/R/mlRegressionKnn.R
@@ -294,7 +294,7 @@ mlRegressionKnn <- function(jaspResults, dataset, options, state = NULL) {
   yBreaks <- jaspGraphs::getPrettyAxisBreaks(c(0, 1), min.n = 4) # 0.001 for Inf at x = 0 in 'inv' weights
   plotFunc <- function(x) func(x) / func(0.001)
   p <- ggplot2::ggplot() +
-    ggplot2::stat_function(fun = plotFunc, size = 1, xlim = c(0.001, 1)) +
+    ggplot2::stat_function(fun = plotFunc, linewidth = 1, xlim = c(0.001, 1)) +
     ggplot2::scale_x_continuous(name = gettext("Proportion of Max. Distance"), breaks = xBreaks, limits = c(0, 1)) +
     ggplot2::scale_y_continuous(name = gettext("Relative Weight"), breaks = yBreaks, limits = c(0, 1)) +
     jaspGraphs::geom_rangeframe() +
diff --git a/R/mlRegressionNeuralNetwork.R b/R/mlRegressionNeuralNetwork.R
index 906d9ddd..da99e3cb 100644
--- a/R/mlRegressionNeuralNetwork.R
+++ b/R/mlRegressionNeuralNetwork.R
@@ -510,7 +510,7 @@ mlRegressionNeuralNetwork <- function(jaspResults, dataset, options, ...) {
   xBreaks <- jaspGraphs::getPrettyAxisBreaks(c(-6, 6), min.n = 4)
   yBreaks <- jaspGraphs::getPrettyAxisBreaks(c(-1, 1), min.n = 4)
   p <- ggplot2::ggplot() +
-    ggplot2::stat_function(fun = ac, size = 1) +
+    ggplot2::stat_function(fun = ac, linewidth = 1) +
     ggplot2::scale_x_continuous(name = gettext("Input"), breaks = xBreaks, limits = c(-6, 6)) +
     ggplot2::scale_y_continuous(name = gettext("Output"), breaks = yBreaks, limits = c(-1, 1)) +
     jaspGraphs::geom_rangeframe() +
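
Note: all hunks above apply the same migration. Since ggplot2 3.4.0 the 'size' argument is deprecated for controlling line thickness (bar and label outlines, smooths, stat_function curves), and 'linewidth' is the replacement. A minimal standalone sketch of the pattern, outside the JASP module; the data frame and values below are made up for illustration only:

library(ggplot2)

# Toy stand-in for the train/test split counts plotted in commonMachineLearningRegression.R.
plotData <- data.frame(x = c("Train", "Test"), y = c(80, 20))

# Old call (warns on ggplot2 >= 3.4.0): geom_bar(stat = "identity", col = "black", size = 0.5)
# New call: the bar outline thickness is now set via `linewidth`.
p <- ggplot(plotData, aes(x = x, y = y, fill = x)) +
  geom_bar(stat = "identity", col = "black", linewidth = 0.5) +
  coord_flip()
print(p)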