
Commit f01e2b6

Scalastyle

1 parent 6752864 commit f01e2b6

16 files changed: +84 -131 lines changed

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/data/model/EmptyRDFGraphDataFrame.scala
Lines changed: 4 additions & 2 deletions

@@ -4,6 +4,8 @@ import org.apache.spark.sql.types.{StringType, StructField, StructType}
 import org.apache.spark.sql.{DataFrame, Row, SQLContext}
 
 /**
+  * Represents an empty RDF graph as Dataframe.
+  *
   * @author Lorenz Buehmann
   */
 object EmptyRDFGraphDataFrame {
@@ -13,7 +15,7 @@ object EmptyRDFGraphDataFrame {
     val schemaString = "subject predicate object"
 
     // generate the schema based on the string of schema
-    val schema = StructType(schemaString.split(" ").map(fieldName => StructField(fieldName, StringType, true)))
+    val schema = StructType(schemaString.split(" ").map(fieldName => StructField(fieldName, StringType, nullable = true)))
 
     // convert triples RDD to rows
     val rowRDD = sqlContext.sparkContext.emptyRDD[Row]
@@ -26,4 +28,4 @@ object EmptyRDFGraphDataFrame {
 
     triplesDataFrame
   }
-}
\ No newline at end of file
+}
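The second hunk names the boolean argument at the call site, a common readability fix in style cleanups. As a self-contained illustration of the same empty-DataFrame pattern, here is a minimal sketch assuming a local SparkSession and the current API (not the repo's SQLContext-based helper):

import org.apache.spark.sql.{DataFrame, Row, SparkSession}
import org.apache.spark.sql.types.{StringType, StructField, StructType}

object EmptyTriplesDataFrameSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("empty-df").getOrCreate()

    // generate the schema from a whitespace-separated field list,
    // naming the `nullable` argument as the commit does
    val schema = StructType("subject predicate object".split(" ")
      .map(fieldName => StructField(fieldName, StringType, nullable = true)))

    // an empty RDD of rows yields an empty but correctly typed DataFrame
    val triples: DataFrame = spark.createDataFrame(spark.sparkContext.emptyRDD[Row], schema)
    triples.printSchema()

    spark.stop()
  }
}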

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/data/model/RDFGraphDataset.scala
Lines changed: 1 addition & 1 deletion

@@ -35,7 +35,7 @@ class RDFGraphDataset(override val triples: Dataset[Triple])
 
   def unionAll(graphs: Seq[RDFGraphDataset]): RDFGraphDataset = {
     // the Dataframe based solution
-    return graphs.reduce(_ union _)
+    graphs.reduce(_ union _)
 
 //    // to limit the lineage, we convert to RDDs first, and use the SparkContext Union method for a sequence of RDDs
 //    val df: Option[DataFrame] = graphs match {
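Dropping the explicit `return` is more than cosmetic in Scala: the last expression of a method is already its result, and `return` inside a closure is implemented via a control-flow exception, so style checkers flag it. A minimal sketch of the same reduce-based union (an illustrative helper, not the repo's class):

import org.apache.spark.sql.Dataset

object DatasetOps {
  // folds a non-empty sequence of Datasets into one by pairwise union;
  // the final expression is the method result, so no `return` is needed
  def unionAll[T](datasets: Seq[Dataset[T]]): Dataset[T] =
    datasets.reduce(_ union _)
}

As the commented-out block in the hunk itself notes, pairwise reduce grows the query lineage with the number of graphs; converting to RDDs and issuing a single SparkContext union is the usual workaround for long sequences.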

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/data/model/RDFTuple.scala
Lines changed: 4 additions & 4 deletions

@@ -5,10 +5,10 @@ package net.sansa_stack.inference.spark.data.model
  *
  * @param s the subject
  * @param o the object
- *
  * @author Lorenz Buehmann
  */
 case class RDFTuple(s: String, o: String) extends Product2[String, String] {
-  override def _1: String = s
-  override def _2: String = o
-}
\ No newline at end of file
+  override def _1: String = s
+
+  override def _2: String = o
+}
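RDFTuple is a pair-like view on (subject, object); because it extends Product2, both components are accessible positionally as well as by name. A tiny usage sketch (the class is re-declared so the snippet stands alone):

case class RDFTuple(s: String, o: String) extends Product2[String, String] {
  override def _1: String = s

  override def _2: String = o
}

object RDFTupleDemo {
  def main(args: Array[String]): Unit = {
    val t = RDFTuple("http://example.org/s", "http://example.org/o")
    // the positional accessors delegate to the named fields
    assert(t._1 == t.s && t._2 == t.o)
    println(t)
  }
}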

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/forwardchaining/triples/AbstractForwardRuleReasonerRDFS.scala
Lines changed: 6 additions & 6 deletions

@@ -51,14 +51,14 @@ abstract class AbstractForwardRuleReasonerRDFS[Rdf <: RDF, D, G <: AbstractRDFGr
     // println("others:" + others.size())
 
     /*
-      rdfs5 xxx rdfs:subPropertyOf yyy .
-            yyy rdfs:subPropertyOf zzz . xxx rdfs:subPropertyOf zzz .
+      rdfs5 xxx rdfs:subPropertyOf yyy .
+            yyy rdfs:subPropertyOf zzz . xxx rdfs:subPropertyOf zzz .
     */
     val r5 = rule5(graph)
 
     /*
-      rdfs7 aaa rdfs:subPropertyOf bbb .
-            xxx aaa yyy . xxx bbb yyy .
+      rdfs7 aaa rdfs:subPropertyOf bbb .
+            xxx aaa yyy . xxx bbb yyy .
     */
     val r7 = rule7(others)
     others = others.union(r7)
@@ -73,8 +73,8 @@ abstract class AbstractForwardRuleReasonerRDFS[Rdf <: RDF, D, G <: AbstractRDFGr
     val r11 = rule11(graph)
 
     /*
-      rdfs9 xxx rdfs:subClassOf yyy .
-            zzz rdf:type xxx . zzz rdf:type yyy .
+      rdfs9 xxx rdfs:subClassOf yyy .
+            zzz rdf:type xxx . zzz rdf:type yyy .
     */
     val r9 = rule9(types)
 

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/forwardchaining/triples/FixpointIteration.scala
Lines changed: 2 additions & 2 deletions

@@ -27,7 +27,7 @@ object FixpointIteration extends Logging {
    * the termination criterion. The iterations terminate when either the termination criterion
    * [[RDD]] contains no elements or when `maxIterations` iterations have been performed.
    *
-   **/
+   */
  def apply[T: ClassTag](maxIterations: Int = 10)(rdd: RDD[T], f: RDD[T] => RDD[T]): RDD[T] = {
    var newRDD = rdd
    newRDD.cache()
@@ -56,7 +56,7 @@ object FixpointIteration extends Logging {
    * the termination criterion. The iterations terminate when either the termination criterion
    * RDD contains no elements or when `maxIterations` iterations have been performed.
    *
-   **/
+   */
  def apply2[T: ClassTag](maxIterations: Int = 10)(dataset: Dataset[T], f: Dataset[T] => Dataset[T]): Dataset[T] = {
    var newDS = dataset
    newDS.cache()
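Both overloads implement the same control flow: apply f, diff against what is already known, and stop when the diff is empty or the iteration budget is spent. A compact sketch of that loop for RDDs (illustrative only; the repo's version differs in logging and caching details):

import scala.reflect.ClassTag
import org.apache.spark.rdd.RDD

object FixpointSketch {
  // applies f until it produces no new elements, or maxIterations is reached
  def iterate[T: ClassTag](maxIterations: Int)(rdd: RDD[T], f: RDD[T] => RDD[T]): RDD[T] = {
    var current = rdd.cache()
    var i = 0
    var done = false
    while (!done && i < maxIterations) {
      val delta = f(current).subtract(current).cache()  // the genuinely new elements
      done = delta.isEmpty()
      if (!done) current = current.union(delta).cache()
      i += 1
    }
    current
  }
}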

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/forwardchaining/triples/ForwardRuleReasonerOWLHorst.scala
Lines changed: 8 additions & 8 deletions

@@ -179,8 +179,8 @@ class ForwardRuleReasonerOWLHorst(sc: SparkContext, parallelism: Int = 2) extend
     // 2. SubPropertyOf inheritance according to rdfs7 is computed
 
     /*
-      rdfs7 aaa rdfs:subPropertyOf bbb .
-            xxx aaa yyy . xxx bbb yyy .
+      rdfs7 aaa rdfs:subPropertyOf bbb .
+            xxx aaa yyy . xxx bbb yyy .
     */
     val triplesRDFS7 =
       triplesFiltered
@@ -193,17 +193,17 @@ class ForwardRuleReasonerOWLHorst(sc: SparkContext, parallelism: Int = 2) extend
     // 3. Domain and Range inheritance according to rdfs2 and rdfs3 is computed
 
     /*
-      rdfs2 aaa rdfs:domain xxx .
-            yyy aaa zzz . yyy rdf:type xxx .
+      rdfs2 aaa rdfs:domain xxx .
+            yyy aaa zzz . yyy rdf:type xxx .
     */
     val triplesRDFS2 =
       rdfs7Res
         .filter(t => domainMapBC.value.contains(t.p))
         .map(t => Triple.create(t.s, RDF.`type`.asNode, domainMapBC.value(t.p)))
 
     /*
-      rdfs3 aaa rdfs:range xxx .
-            yyy aaa zzz . zzz rdf:type xxx .
+      rdfs3 aaa rdfs:range xxx .
+            yyy aaa zzz . zzz rdf:type xxx .
     */
     val triplesRDFS3 =
       rdfs7Res
@@ -215,8 +215,8 @@ class ForwardRuleReasonerOWLHorst(sc: SparkContext, parallelism: Int = 2) extend
     // input are the rdf:type triples from RDFS2/RDFS3 and the ones contained in the original graph
 
     /*
-      rdfs9 xxx rdfs:subClassOf yyy .
-            zzz rdf:type xxx . zzz rdf:type yyy .
+      rdfs9 xxx rdfs:subClassOf yyy .
+            zzz rdf:type xxx . zzz rdf:type yyy .
     */
     val triplesRDFS9 =
       triplesRDFS2
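The rdfs2/rdfs3 hunks show the domain/range pattern this file uses: broadcast a property-to-class map, keep triples whose predicate occurs in it, and emit an rdf:type triple for the subject (domain) or object (range). A hedged miniature of both rules (same simplified triple type as the earlier sketch; data and IRIs are invented):

import org.apache.spark.sql.SparkSession

case class STriple(s: String, p: String, o: String)

object Rdfs2And3Sketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("rdfs2-3").getOrCreate()
    val sc = spark.sparkContext

    val domainBC = sc.broadcast(Map(":hasParent" -> ":Person"))  // aaa rdfs:domain xxx
    val rangeBC  = sc.broadcast(Map(":hasParent" -> ":Person"))  // aaa rdfs:range xxx

    val triples = sc.parallelize(Seq(STriple(":alice", ":hasParent", ":carol")))

    // rdfs2: (yyy aaa zzz) and (aaa rdfs:domain xxx) yields (yyy rdf:type xxx)
    val byDomain = triples
      .filter(t => domainBC.value.contains(t.p))
      .map(t => STriple(t.s, "rdf:type", domainBC.value(t.p)))

    // rdfs3: (yyy aaa zzz) and (aaa rdfs:range xxx) yields (zzz rdf:type xxx)
    val byRange = triples
      .filter(t => rangeBC.value.contains(t.p))
      .map(t => STriple(t.o, "rdf:type", rangeBC.value(t.p)))

    byDomain.union(byRange).collect().foreach(println)
    spark.stop()
  }
}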

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/forwardchaining/triples/ForwardRuleReasonerRDFS.scala
Lines changed: 10 additions & 10 deletions

@@ -53,8 +53,8 @@ class ForwardRuleReasonerRDFS(sc: SparkContext, parallelism: Int = 2) extends Tr
     val subClassOfTriplesTrans = computeTransitiveClosure(subClassOfTriples, RDFS.subClassOf.asNode()).setName("rdfs11")// mutable.Set()++subClassOfTriples.collect())
 
     /*
-      rdfs5 xxx rdfs:subPropertyOf yyy .
-            yyy rdfs:subPropertyOf zzz . xxx rdfs:subPropertyOf zzz .
+      rdfs5 xxx rdfs:subPropertyOf yyy .
+            yyy rdfs:subPropertyOf zzz . xxx rdfs:subPropertyOf zzz .
     */
     val subPropertyOfTriples = extractTriples(schemaTriples, RDFS.subPropertyOf.asNode()) // extract rdfs:subPropertyOf triples
     val subPropertyOfTriplesTrans = computeTransitiveClosure(subPropertyOfTriples, RDFS.subPropertyOf.asNode()).setName("rdfs5")// extractTriples(mutable.Set()++subPropertyOfTriples.collect(), RDFS.subPropertyOf.getURI))
@@ -81,8 +81,8 @@ class ForwardRuleReasonerRDFS(sc: SparkContext, parallelism: Int = 2) extends Tr
     // 2. SubPropertyOf inheritance according to rdfs7 is computed
 
     /*
-      rdfs7 aaa rdfs:subPropertyOf bbb .
-            xxx aaa yyy . xxx bbb yyy .
+      rdfs7 aaa rdfs:subPropertyOf bbb .
+            xxx aaa yyy . xxx bbb yyy .
     */
     val triplesRDFS7 =
       otherTriples // all triples (s p1 o)
@@ -97,8 +97,8 @@ class ForwardRuleReasonerRDFS(sc: SparkContext, parallelism: Int = 2) extends Tr
     // 3. Domain and Range inheritance according to rdfs2 and rdfs3 is computed
 
     /*
-      rdfs2 aaa rdfs:domain xxx .
-            yyy aaa zzz . yyy rdf:type xxx .
+      rdfs2 aaa rdfs:domain xxx .
+            yyy aaa zzz . yyy rdf:type xxx .
     */
     val domainTriples = extractTriples(schemaTriples, RDFS.domain.asNode())
     val domainMap = domainTriples.map(t => (t.s, t.o)).collect.toMap
@@ -111,8 +111,8 @@ class ForwardRuleReasonerRDFS(sc: SparkContext, parallelism: Int = 2) extends Tr
         .setName("rdfs2")
 
     /*
-      rdfs3 aaa rdfs:range xxx .
-            yyy aaa zzz . zzz rdf:type xxx .
+      rdfs3 aaa rdfs:range xxx .
+            yyy aaa zzz . zzz rdf:type xxx .
     */
     val rangeTriples = extractTriples(schemaTriples, RDFS.range.asNode())
     val rangeMap = rangeTriples.map(t => (t.s, t.o)).collect().toMap
@@ -133,8 +133,8 @@ class ForwardRuleReasonerRDFS(sc: SparkContext, parallelism: Int = 2) extends Tr
 
     // 4. SubClass inheritance according to rdfs9
     /*
-      rdfs9 xxx rdfs:subClassOf yyy .
-            zzz rdf:type xxx . zzz rdf:type yyy .
+      rdfs9 xxx rdfs:subClassOf yyy .
+            zzz rdf:type xxx . zzz rdf:type yyy .
     */
     val triplesRDFS9 =
       typeTriples // all rdf:type triples (s a A)
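Step 4 is a join between the instance-level rdf:type triples and the transitively closed subclass schema. Keyed pair RDDs make the shape clear; a runnable miniature (illustrative class names, and the schema side is assumed already closed):

import org.apache.spark.sql.SparkSession

object Rdfs9Sketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("rdfs9").getOrCreate()
    val sc = spark.sparkContext

    // schema side: xxx rdfs:subClassOf yyy (assumed transitively closed)
    val subClassOf = sc.parallelize(Seq(("Dog", "Animal"), ("Dog", "LivingThing")))
    // instance side: zzz rdf:type xxx
    val types = sc.parallelize(Seq((":rex", "Dog")))

    // key the instances by their class and join on class == subclass:
    // (Dog, :rex) join (Dog, Animal) => (Dog, (:rex, Animal)) => (:rex, Animal)
    val inferred = types.map(_.swap)
      .join(subClassOf)
      .map { case (_, (instance, superClass)) => (instance, superClass) }

    inferred.collect().foreach(println)  // (:rex,Animal) and (:rex,LivingThing)
    spark.stop()
  }
}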

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/forwardchaining/triples/ForwardRuleReasonerRDFSDataframe.scala
Lines changed: 11 additions & 11 deletions

@@ -65,8 +65,8 @@ class ForwardRuleReasonerRDFSDataframe(session: SparkSession, parallelism: Int =
     // val checkSubclass = udf((cls: String) => subClassOfMapBC.value.contains(cls))
     // val makeSuperTypeTriple = udf((ind: String, cls: String) => (ind, subClassOfMapBC.value(cls)))
     /*
-      rdfs5 xxx rdfs:subPropertyOf yyy .
-            yyy rdfs:subPropertyOf zzz . xxx rdfs:subPropertyOf zzz .
+      rdfs5 xxx rdfs:subPropertyOf yyy .
+            yyy rdfs:subPropertyOf zzz . xxx rdfs:subPropertyOf zzz .
     */
     val subPropertyOfTriples = index(RDFS.subPropertyOf.asNode()) // extract rdfs:subPropertyOf triples
     val subPropertyOfTriplesTrans = broadcast(computeTransitiveClosureDF(subPropertyOfTriples.as[RDFTriple]).toDF().alias("SP"))
@@ -95,8 +95,8 @@ class ForwardRuleReasonerRDFSDataframe(session: SparkSession, parallelism: Int =
     // 2. SubPropertyOf inheritance according to rdfs7 is computed
 
     /*
-      rdfs7 aaa rdfs:subPropertyOf bbb .
-            xxx aaa yyy . xxx bbb yyy .
+      rdfs7 aaa rdfs:subPropertyOf bbb .
+            xxx aaa yyy . xxx bbb yyy .
     */
     val triplesRDFS7 =
       triples // all triples (s p1 o)
@@ -117,8 +117,8 @@ class ForwardRuleReasonerRDFSDataframe(session: SparkSession, parallelism: Int =
     // 3. Domain and Range inheritance according to rdfs2 and rdfs3 is computed
 
     /*
-      rdfs2 aaa rdfs:domain xxx .
-            yyy aaa zzz . yyy rdf:type xxx .
+      rdfs2 aaa rdfs:domain xxx .
+            yyy aaa zzz . yyy rdf:type xxx .
     */
     val domainTriples = broadcast(index(RDFS.domain.asNode()).alias("DOM"))
 
@@ -132,8 +132,8 @@ class ForwardRuleReasonerRDFSDataframe(session: SparkSession, parallelism: Int =
     // triplesRDFS2.explain(true)
 
     /*
-      rdfs3 aaa rdfs:range xxx .
-            yyy aaa zzz . zzz rdf:type xxx .
+      rdfs3 aaa rdfs:range xxx .
+            yyy aaa zzz . zzz rdf:type xxx .
     */
     val rangeTriples = broadcast(index(RDFS.range.asNode()).alias("RAN"))
 
@@ -154,8 +154,8 @@ class ForwardRuleReasonerRDFSDataframe(session: SparkSession, parallelism: Int =
     // 4. SubClass inheritance according to rdfs9
 
     /*
-      rdfs9 xxx rdfs:subClassOf yyy .
-            zzz rdf:type xxx . zzz rdf:type yyy .
+      rdfs9 xxx rdfs:subClassOf yyy .
+            zzz rdf:type xxx . zzz rdf:type yyy .
     */
     val tuplesRDFS9 = typeTuples
       .join(subClassOfTriplesTrans, $"TYPES.${sqlSchema.objectCol}" === $"SC.${sqlSchema.subjectCol}", "inner")
@@ -320,4 +320,4 @@ object ForwardRuleReasonerRDFSDataframe {
     val infGraph = ForwardRuleReasonerRDFSDataframe(session).apply(graph)
     println(infGraph.size())
   }
-}
\ No newline at end of file
+}
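In the DataFrame variant, rdfs9 becomes a Catalyst inner join: the rdf:type tuples joined against the broadcast subclass closure on class = subclass. A standalone sketch of that join shape (column names are illustrative, not the repo's SQL schema):

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.broadcast

object Rdfs9DataFrameSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("rdfs9-df").getOrCreate()
    import spark.implicits._

    val types = Seq((":rex", "Dog")).toDF("instance", "cls").alias("TYPES")
    val subClassOf = Seq(("Dog", "Animal")).toDF("sub", "sup").alias("SC")

    // rdfs9 as an inner join; the (usually small) schema side is broadcast
    val inferred = types
      .join(broadcast(subClassOf), $"TYPES.cls" === $"SC.sub", "inner")
      .select($"TYPES.instance", $"SC.sup")

    inferred.show()  // (:rex, Animal)
    spark.stop()
  }
}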

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/forwardchaining/triples/TransitiveReasoner.scala
Lines changed: 13 additions & 13 deletions

@@ -78,7 +78,7 @@ class TransitiveReasoner(sc: SparkContext, val properties: Seq[Node], val parall
   private def addTransitive(triples: Set[Triple]): Set[Triple] = {
     triples ++ (
       for (t1 <- triples; t2 <- triples if t1.o == t2.s)
-      yield Triple.create(t1.s, t1.p, t2.o))
+        yield Triple.create(t1.s, t1.p, t2.o))
   }
 
   /**
@@ -101,7 +101,7 @@ class TransitiveReasoner(sc: SparkContext, val properties: Seq[Node], val parall
   /**
     * Computes the transitive closure for the given predicate on an RDD of triples.
     *
-    * @param triples the RDD of triples
+    * @param triples the RDD of triples
     * @param predicate the predicate
     * @return an RDD containing the transitive closure of the triples
     */
@@ -139,7 +139,7 @@ class TransitiveReasoner(sc: SparkContext, val properties: Seq[Node], val parall
       rdd.join(edgesReversed).map(x => (x._2._2, x._2._1))
     }
 
-    // tc = FixpointIteration(10)(tc, f)
+    // tc = FixpointIteration(10)(tc, f)
 
     // the join is iterated until a fixed point is reached
     var i = 1
@@ -190,14 +190,14 @@ class TransitiveReasoner(sc: SparkContext, val properties: Seq[Node], val parall
 
     // the join is iterated until a fixed point is reached
     var i = 1
-    while(!deltaTC.isEmpty()) {
+    while (!deltaTC.isEmpty()) {
       log.info(s"iteration $i...")
 
      // perform the join (x, y) x (y, x), obtaining an RDD of (x=y, (y, x)) pairs,
      // then project the result to obtain the new (x, y) paths.
      deltaTC = deltaTC.join(edgesReversed)
-        .map(x => (x._2._2, x._2._1))
-        .subtract(tc).distinct().cache()
+        .map(x => (x._2._2, x._2._1))
+        .subtract(tc).distinct().cache()
 
       // add to TC
       tc = tc.union(deltaTC).cache()
@@ -217,7 +217,7 @@ class TransitiveReasoner(sc: SparkContext, val properties: Seq[Node], val parall
     */
   def computeTransitiveClosure(edges: Dataset[Triple]): Dataset[Triple] = {
     log.info("computing TC...")
-    // implicit val myObjEncoder = org.apache.spark.sql.Encoders.kryo[RDFTriple]
+    // implicit val myObjEncoder = org.apache.spark.sql.Encoders.kryo[RDFTriple]
     val spark = edges.sparkSession.sqlContext
     import spark.implicits._
     implicit val myObjEncoder = org.apache.spark.sql.Encoders.kryo[Triple]
@@ -242,12 +242,12 @@ class TransitiveReasoner(sc: SparkContext, val properties: Seq[Node], val parall
 
     tc.createOrReplaceTempView("SC")
     var joined = tc.as("A").join(tc.as("B"), $"A.o" === $"B.s").select("A.s", "A.p", "B.o").as[Triple]
-    // var joined = tc
-    //   .join(edges, tc("o") === edges("s"))
-    //   .select(tc("s"), tc("p"), edges("o"))
-    //   .as[RDFTriple]
-    // tc.sqlContext.
-    //   sql("SELECT A.subject, A.predicate, B.object FROM SC A INNER JOIN SC B ON A.object = B.subject")
+    // var joined = tc
+    //   .join(edges, tc("o") === edges("s"))
+    //   .select(tc("s"), tc("p"), edges("o"))
+    //   .as[RDFTriple]
+    // tc.sqlContext.
+    //   sql("SELECT A.subject, A.predicate, B.object FROM SC A INNER JOIN SC B ON A.object = B.subject")
 
     // joined.explain()
     // var joined = df1.join(df2, df1("object") === df2("subject"), "inner")
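The loop in this file is the classic Spark pattern for transitive closure: join the newest paths against reversed edges, subtract what is already known, and stop when nothing new appears. A self-contained miniature over (src, dst) pairs (illustrative data):

import org.apache.spark.sql.SparkSession

object TransitiveClosureSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("tc").getOrCreate()
    val sc = spark.sparkContext

    val edges = sc.parallelize(Seq(("a", "b"), ("b", "c"), ("c", "d"))).cache()
    // key edges by destination, so a join on path-source prepends an edge to a path
    val edgesReversed = edges.map { case (src, dst) => (dst, src) }.cache()

    var tc = edges
    var delta = tc
    while (!delta.isEmpty()) {
      // (x, y) join (x, w) gives (x, (y, w)); project to the new path (w, y)
      delta = delta.join(edgesReversed)
        .map { case (_, (y, w)) => (w, y) }
        .subtract(tc).distinct().cache()
      tc = tc.union(delta).cache()
    }

    tc.collect().sorted.foreach(println)  // includes (a,c), (a,d), (b,d)
    spark.stop()
  }
}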

sansa-inference-spark/src/main/scala/net/sansa_stack/inference/spark/utils/PrettyDuration.scala
Lines changed: 2 additions & 2 deletions

@@ -8,7 +8,7 @@ object PrettyDuration {
 
   def pretty: String = pretty(includeNanos = false)
 
-  /** Selects most apropriate TimeUnit for given duration and formats it accordingly */
+  /** Selects most appropriate TimeUnit for given duration and formats it accordingly */
   def pretty(includeNanos: Boolean, precision: Int = 4): String = {
     require(precision > 0, "precision must be > 0")
 
@@ -48,4 +48,4 @@ object PrettyDuration {
     }
   }
 
-}
\ No newline at end of file
+}
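The touched doc comment belongs to a duration pretty-printer with a precision knob. As a hedged sketch of how such an enrichment is typically wired up and used (the formatting rules below are invented for illustration and are not the repo's):

import scala.concurrent.duration._

object PrettyDurationSketch {
  // illustrative enrichment: picks a readable unit for a FiniteDuration
  implicit class PrettyOps(d: FiniteDuration) {
    def pretty: String =
      if (d.toNanos < 1000L) s"${d.toNanos} ns"
      else if (d.toMillis < 1000L) f"${d.toNanos / 1e6}%.2f ms"
      else f"${d.toMillis / 1e3}%.2f s"
  }

  def main(args: Array[String]): Unit = {
    println(1234.millis.pretty)  // 1.23 s
    println(250.micros.pretty)   // 0.25 ms
  }
}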
