
Commit 3527357

Author: Sumedh Wale (committed)
[SNAPPYDATA] fixed doc references in R, build changes
- updated build to use common top-level sources/doc packaging methods if available
- build changes for the switch from maven to maven-publish
- fixed failure in an R test
1 parent 85f93f6 commit 3527357

File tree: 4 files changed, +33 −18 lines

R/pkg/R/context.R (1 addition, 1 deletion)

@@ -303,7 +303,7 @@ setCheckpointDir <- function(sc, dirName) {
 #'
 #' A directory can be given if the recursive option is set to true.
 #' Currently directories are only supported for Hadoop-supported filesystems.
-#' Refer Hadoop-supported filesystems at \url{https://wiki.apache.org/hadoop/HCFS}.
+#' Refer Hadoop-supported filesystems at \url{https://cwiki.apache.org/confluence/display/HADOOP2/HCFS}.
 #'
 #' @rdname spark.addFile
 #' @param path The path of the file to be added

R/pkg/vignettes/sparkr-vignettes.Rmd (1 addition, 1 deletion)

@@ -317,7 +317,7 @@ A common flow of grouping and aggregation is
 
 2. Feed the `GroupedData` object to `agg` or `summarize` functions, with some provided aggregation functions to compute a number within each group.
 
-A number of widely used functions are supported to aggregate data after grouping, including `avg`, `countDistinct`, `count`, `first`, `kurtosis`, `last`, `max`, `mean`, `min`, `sd`, `skewness`, `stddev_pop`, `stddev_samp`, `sumDistinct`, `sum`, `var_pop`, `var_samp`, `var`. See the [API doc for `mean`](http://spark.apache.org/docs/latest/api/R/mean.html) and other `agg_funcs` linked there.
+A number of widely used functions are supported to aggregate data after grouping, including `avg`, `countDistinct`, `count`, `first`, `kurtosis`, `last`, `max`, `mean`, `min`, `sd`, `skewness`, `stddev_pop`, `stddev_samp`, `sumDistinct`, `sum`, `var_pop`, `var_samp`, `var`. See the [API doc for `mean`](https://spark.apache.org/docs/2.1.3/api/R/mean.html) and other `agg_funcs` linked there.
 
 For example we can compute a histogram of the number of cylinders in the `mtcars` dataset as shown below.

build.gradle (5 additions, 10 deletions)

@@ -312,7 +312,7 @@ task cleanSparkJUnit { doLast {
 
 subprojects {
   apply plugin: 'scala'
-  apply plugin: 'maven'
+  apply plugin: 'maven-publish'
   apply plugin: 'scalaStyle'
 
   int maxWorkers = project.hasProperty('org.gradle.workers.max') ?
@@ -352,6 +352,10 @@ subprojects {
     archiveClassifier.set('sources')
     from sourceSets.main.allSource
   }
+  task packageScalaDocs(type: Jar, dependsOn: scaladoc) {
+    archiveClassifier.set('javadoc')
+    from scaladoc
+  }
 
   configurations {
     testOutput {
@@ -369,15 +373,6 @@ subprojects {
     testOutput packageTests
   }
 }
-task packageScalaDocs(type: Jar, dependsOn: scaladoc) {
-  archiveClassifier.set('javadoc')
-  from scaladoc
-}
-if (rootProject.hasProperty('enablePublish')) {
-  artifacts {
-    archives packageScalaDocs, packageSources
-  }
-}
 
 // fix scala+java mix to all use compileScala which use correct dependency order
 sourceSets.main.scala.srcDir 'src/main/java'
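
The removed `enablePublish` block relied on the old `maven` plugin's behavior of publishing whatever was added to the `archives` configuration; `maven-publish` instead attaches artifacts to an explicitly declared publication. A minimal sketch of what the replacement wiring could look like per subproject (the `publishing` block below is an assumption for illustration, not part of this commit; coordinates come from the project as usual):

// Hypothetical maven-publish wiring; the jar tasks referenced here are the
// packageSources and packageScalaDocs tasks defined above in subprojects.
if (rootProject.hasProperty('enablePublish')) {
  publishing {
    publications {
      maven(MavenPublication) {
        from components.java        // main jar plus dependency metadata
        artifact packageSources     // the 'sources' classifier jar
        artifact packageScalaDocs   // the 'javadoc' classifier jar
      }
    }
  }
}

With this style of wiring, publishing is driven by the plugin's generated `publish` and `publishToMavenLocal` tasks rather than the old `uploadArchives` task.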

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala (26 additions, 6 deletions)

@@ -14,6 +14,24 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+/*
+ * Changes for TIBCO Project SnappyData data platform.
+ *
+ * Portions Copyright (c) 2017-2022 TIBCO Software Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License. You
+ * may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied. See the License for the specific language governing
+ * permissions and limitations under the License. See accompanying
+ * LICENSE file.
+ */
 
 package org.apache.spark.deploy
 
@@ -427,9 +445,10 @@ class SparkSubmitSuite
     // Check if the SparkR package is installed
     assume(RUtils.isSparkRInstalled, "SparkR is not installed in this build.")
     val main = MavenCoordinate("my.great.lib", "mylib", "0.1")
-    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
-    val rScriptDir = Seq(
-      sparkHome, "R", "pkg", "tests", "fulltests", "packageInAJarTest.R").mkString(File.separator)
+    val sparkProjectHome = sys.props.getOrElse("spark.project.home",
+      sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!")))
+    val rScriptDir = Seq(sparkProjectHome,
+      "R", "pkg", "tests", "fulltests", "packageInAJarTest.R").mkString(File.separator)
     assert(new File(rScriptDir).exists)
     IvyTestUtils.withRepository(main, None, None, withR = true) { repo =>
       val args = Seq(
@@ -446,11 +465,12 @@ class SparkSubmitSuite
 
   test("include an external JAR in SparkR") {
     assume(RUtils.isRInstalled, "R isn't installed on this machine.")
-    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
+    val sparkProjectHome = sys.props.getOrElse("spark.project.home",
+      sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!")))
     // Check if the SparkR package is installed
     assume(RUtils.isSparkRInstalled, "SparkR is not installed in this build.")
-    val rScriptDir =
-      Seq(sparkHome, "R", "pkg", "tests", "fulltests", "jarTest.R").mkString(File.separator)
+    val rScriptDir = Seq(sparkProjectHome,
+      "R", "pkg", "tests", "fulltests", "jarTest.R").mkString(File.separator)
     assert(new File(rScriptDir).exists)
 
     // compile a small jar containing a class that will be called from R code.
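
The two test fixes make the R script lookup prefer a new `spark.project.home` system property, falling back to the existing `spark.test.home`, so the suite can locate the R scripts in a source checkout even when the test home points elsewhere. A sketch of how a build could supply the property, assuming a plain Gradle `Test` task (the wiring below is illustrative only; none of it appears in this commit):

// Hypothetical test-task configuration; only the two property names
// come from the diff above.
test {
  // Preferred by the patched suite: the root of the source checkout.
  systemProperty 'spark.project.home', rootProject.projectDir.absolutePath
  // Fallback still read when spark.project.home is unset.
  systemProperty 'spark.test.home', rootProject.projectDir.absolutePath
}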
