Skip to content
This repository was archived by the owner on Oct 8, 2020. It is now read-only.

Commit 0c76ed8

Browse files
committed
Added TRANSITIVE profile example for Spark
1 parent 86bebcf commit 0c76ed8

File tree

1 file changed

+11
-9
lines changed

1 file changed

+11
-9
lines changed

sansa-examples-spark/src/main/scala/net/sansa_stack/examples/spark/inference/RDFGraphInference.scala

Lines changed: 11 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -3,11 +3,11 @@ package net.sansa_stack.examples.spark.inference
33
import java.io.File
44
import java.net.URI
55

6-
import net.sansa_stack.inference.rules.{RDFSLevel, ReasoningProfile}
6+
import net.sansa_stack.inference.rules.{ RDFSLevel, ReasoningProfile }
77
import net.sansa_stack.inference.rules.ReasoningProfile._
88
import net.sansa_stack.inference.spark.data.loader.RDFGraphLoader
99
import net.sansa_stack.inference.spark.data.writer.RDFGraphWriter
10-
import net.sansa_stack.inference.spark.forwardchaining.{ForwardRuleReasonerOWLHorst, ForwardRuleReasonerRDFS}
10+
import net.sansa_stack.inference.spark.forwardchaining.{ ForwardRuleReasonerOWLHorst, ForwardRuleReasonerRDFS, ForwardRuleReasonerRDFSDataset, TransitiveReasoner }
1111
import org.apache.spark.sql.SparkSession
1212

1313
import scala.collection.mutable
@@ -20,18 +20,20 @@ object RDFGraphInference {
2020
"Usage: RDFGraphInference <input> <output> <reasoner")
2121
System.err.println("Supported 'reasoner' as follows:")
2222
System.err.println(" rdfs Forward Rule Reasoner RDFS (Full)")
23-
System.err.println(" rdfs-simple Forward Rule Reasoner RDFS (Simple)")
23+
System.err.println(" rdfs-simple Forward Rule Reasoner RDFS (Simple)")
2424
System.err.println(" owl-horst Forward Rule Reasoner OWL Horst")
25+
System.err.println(" transitive Forward Rule Transitive Reasoner")
2526
System.exit(1)
2627
}
2728
val input = args(0) //"src/main/resources/rdf.nt"
2829
val output = args(1) //"src/main/resources/res/"
2930
val argprofile = args(2) //"rdfs"
3031

3132
val profile = argprofile match {
32-
case "rdfs" => ReasoningProfile.RDFS
33-
case "rdfs-simple" => ReasoningProfile.RDFS_SIMPLE
34-
case "owl-horst" => ReasoningProfile.OWL_HORST
33+
case "rdfs" => ReasoningProfile.RDFS
34+
case "rdfs-simple" => ReasoningProfile.RDFS_SIMPLE
35+
case "owl-horst" => ReasoningProfile.OWL_HORST
36+
case "transitive" => ReasoningProfile.TRANSITIVE
3537

3638
}
3739
val optionsList = args.drop(3).map { arg =>
@@ -66,13 +68,13 @@ object RDFGraphInference {
6668

6769
// create reasoner
6870
val reasoner = profile match {
69-
case RDFS => new ForwardRuleReasonerRDFS(sparkSession.sparkContext, parallelism)
71+
case TRANSITIVE => new TransitiveReasoner(sparkSession.sparkContext, parallelism)
72+
case RDFS => new ForwardRuleReasonerRDFS(sparkSession.sparkContext, parallelism)
7073
case RDFS_SIMPLE =>
71-
val r = new ForwardRuleReasonerRDFS(sparkSession.sparkContext, parallelism)
74+
var r = new ForwardRuleReasonerRDFS(sparkSession.sparkContext, parallelism) //.level.+(RDFSLevel.SIMPLE)
7275
r.level = RDFSLevel.SIMPLE
7376
r
7477
case OWL_HORST => new ForwardRuleReasonerOWLHorst(sparkSession.sparkContext)
75-
case RDFS => new ForwardRuleReasonerRDFS(sparkSession.sparkContext)
7678
}
7779

7880
// compute inferred graph

0 commit comments

Comments (0)