This repository was archived by the owner on Oct 8, 2020. It is now read-only.

Commit 7b28bc8

Make Flink module Scala-style compliant
1 parent: b5f91b1

File tree: 7 files changed (+53, −44 lines)

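Most of the +53/−44 churn below is import reordering: each file moves its java.* and scala.* imports to the top, sets them off with blank lines, and groups the remaining library and project imports after them. A minimal sketch of the layout the files converge on, reusing imports that appear in the diffs below (the commit does not name the style checker enforcing this, so treat the rule as an inference; the object name and input path here are hypothetical):

import java.net.URI

import scala.collection.mutable

import net.sansa_stack.rdf.flink.data.RDFGraphLoader
import org.apache.flink.api.scala.ExecutionEnvironment

// Illustrative only: shows the java/scala-first import grouping in use.
object ImportOrderSketch {
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment
    val inputs = mutable.Buffer(new URI("hdfs://example/input.nt")) // placeholder path
    // loadFromFile is called the same way in TripleOps and TripleReader below
    val graph = RDFGraphLoader.loadFromFile(inputs.head.toString, env)
    println("Number of triples: " + graph.triples.count())
  }
}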

sansa-examples-flink/src/main/scala/net/sansa_stack/examples/flink/inference/RDFGraphInference.scala

Lines changed: 20 additions & 21 deletions
@@ -7,18 +7,17 @@ import java.util.Properties
 import scala.io.Source
 
 import com.typesafe.config.ConfigFactory
-import org.apache.flink.api.java.utils.ParameterTool
-import org.apache.flink.api.scala.ExecutionEnvironment
-import org.apache.flink.configuration.Configuration
-import org.apache.flink.runtime.webmonitor.WebMonitorUtils
-
 import net.sansa_stack.inference.flink.data.{ RDFGraphLoader, RDFGraphWriter }
 import net.sansa_stack.inference.flink.forwardchaining.{
   ForwardRuleReasonerOWLHorst,
   ForwardRuleReasonerRDFS
 }
-import net.sansa_stack.inference.rules.ReasoningProfile._
 import net.sansa_stack.inference.rules.{ RDFSLevel, ReasoningProfile }
+import net.sansa_stack.inference.rules.ReasoningProfile._
+import org.apache.flink.api.java.utils.ParameterTool
+import org.apache.flink.api.scala.ExecutionEnvironment
+import org.apache.flink.configuration.Configuration
+import org.apache.flink.runtime.webmonitor.WebMonitorUtils
 
 object RDFGraphInference {
 
@@ -40,14 +39,14 @@ object RDFGraphInference {
   }
 
   def run(
-    args: Array[String],
-    input: Seq[URI],
-    output: URI,
-    profile: ReasoningProfile,
+    args: Array[String],
+    input: Seq[URI],
+    output: URI,
+    profile: ReasoningProfile,
     writeToSingleFile: Boolean,
-    sortedOutput: Boolean,
-    propertiesFile: File,
-    jobName: String): Unit = {
+    sortedOutput: Boolean,
+    propertiesFile: File,
+    jobName: String): Unit = {
 
     // read reasoner optimization properties
     val reasonerConf =
@@ -99,13 +98,13 @@ object RDFGraphInference {
 
   // the config object
   case class Config(
-    in: Seq[URI] = Seq(),
-    out: URI = new URI("."),
-    profile: ReasoningProfile = ReasoningProfile.RDFS,
-    writeToSingleFile: Boolean = false,
-    sortedOutput: Boolean = false,
-    propertiesFile: File = null,
-    jobName: String = "") // new File(getClass.getResource("reasoner.properties").toURI)
+    in: Seq[URI] = Seq(),
+    out: URI = new URI("."),
+    profile: ReasoningProfile = ReasoningProfile.RDFS,
+    writeToSingleFile: Boolean = false,
+    sortedOutput: Boolean = false,
+    propertiesFile: File = null,
+    jobName: String = "") // new File(getClass.getResource("reasoner.properties").toURI)
 
   // read ReasoningProfile enum
   implicit val profilesRead: scopt.Read[ReasoningProfile.Value] =
@@ -159,4 +158,4 @@ object RDFGraphInference {
 
   }
   parser.showUsageOnError
-}
+}
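Several hunks above (the run(...) parameter list and the Config(...) defaults) remove and re-add visibly identical lines: these are whitespace-only re-indentation fixes, and the matching -} / +} pair ending this and every other file is consistent with normalizing the trailing newline. The same reading applies to the repeated in: String = "" and case "fun" => lines in the files below. Leading whitespace is not preserved in this rendering, so the indentation shown in the diffs is approximate. A self-contained sketch of the multi-line parameter style presumably being enforced (the exact indent width is an assumption):

// Hypothetical object; params indented one extra level is an assumption.
object IndentSketch {
  def run(
    input: Seq[String],
    output: String,
    jobName: String): Unit = {
    println(s"$jobName: ${input.size} input(s) -> $output")
  }

  def main(args: Array[String]): Unit =
    run(Seq("a.nt", "b.nt"), "out.nt", "demo")
}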

sansa-examples-flink/src/main/scala/net/sansa_stack/examples/flink/ml/clustering/RDFByModularityClustering.scala

Lines changed: 4 additions & 3 deletions
@@ -1,11 +1,12 @@
 package net.sansa_stack.examples.flink.ml.clustering
+
 import scala.collection.mutable
+
 import net.sansa_stack.ml.flink.clustering.{ RDFByModularityClustering => RDFByModularityClusteringAlg }
-import org.apache.flink.api.scala.ExecutionEnvironment
 import org.apache.flink.api.scala._
+import org.apache.flink.api.scala.ExecutionEnvironment
 
 object RDFByModularityClustering {
-
   def main(args: Array[String]) {
     parser.parse(args, Config()) match {
       case Some(config) =>
@@ -49,4 +50,4 @@ object RDFByModularityClustering {
 
     help("help").text("prints this usage text")
   }
-}
+}

sansa-examples-flink/src/main/scala/net/sansa_stack/examples/flink/owl/OWLReaderDataSet.scala

Lines changed: 7 additions & 6 deletions
@@ -1,9 +1,10 @@
 package net.sansa_stack.examples.flink.owl
 
-import net.sansa_stack.owl.flink.dataset.{ FunctionalSyntaxOWLAxiomsDataSetBuilder, ManchesterSyntaxOWLAxiomsDataSetBuilder }
+import scala.collection.mutable
+
 import org.apache.flink.api.scala.ExecutionEnvironment
+import net.sansa_stack.owl.flink.dataset.{ FunctionalSyntaxOWLAxiomsDataSetBuilder, ManchesterSyntaxOWLAxiomsDataSetBuilder }
 
-import scala.collection.mutable
 
 object OWLReaderDataSet {
 
@@ -20,12 +21,12 @@ object OWLReaderDataSet {
 
     println(".============================================.")
     println("| Dataset OWL reader example (" + syntax + " syntax)|")
-    println("`============================================´")
+    println(".============================================.")
 
     val env = ExecutionEnvironment.getExecutionEnvironment
 
     val dataSet = syntax match {
-      case "fun" => FunctionalSyntaxOWLAxiomsDataSetBuilder.build(env, input)
+      case "fun" => FunctionalSyntaxOWLAxiomsDataSetBuilder.build(env, input)
       case "manch" => ManchesterSyntaxOWLAxiomsDataSetBuilder.build(env, input)
       case "owl_xml" =>
         throw new RuntimeException("'" + syntax + "' - Not supported, yet.")
@@ -38,7 +39,7 @@ object OWLReaderDataSet {
   }
 
   case class Config(
-    in: String = "",
+    in: String = "",
     syntax: String = "")
 
   // the CLI parser
@@ -56,4 +57,4 @@ object OWLReaderDataSet {
 
     help("help").text("prints this usage text")
   }
-}
+}

sansa-examples-flink/src/main/scala/net/sansa_stack/examples/flink/rdf/RDFStats.scala

Lines changed: 6 additions & 4 deletions
@@ -1,10 +1,12 @@
 package net.sansa_stack.examples.flink.rdf
 
-import scala.collection.mutable
 import java.io.File
-import org.apache.flink.api.scala.ExecutionEnvironment
+
+import scala.collection.mutable
+
 import net.sansa_stack.rdf.flink.data.RDFGraphLoader
 import net.sansa_stack.rdf.flink.stats.RDFStatistics
+import org.apache.flink.api.scala.ExecutionEnvironment
 
 object RDFStats {
 
@@ -36,7 +38,7 @@ object RDFStats {
   }
 
   case class Config(
-    in: String = "",
+    in: String = "",
     out: String = "")
 
   // the CLI parser
@@ -54,4 +56,4 @@ object RDFStats {
 
     help("help").text("prints this usage text")
  }
-}
+}

sansa-examples-flink/src/main/scala/net/sansa_stack/examples/flink/rdf/TripleOps.scala

Lines changed: 8 additions & 6 deletions
@@ -1,9 +1,11 @@
 package net.sansa_stack.examples.flink.rdf
 
 import scala.collection.mutable
-import org.apache.flink.api.scala.ExecutionEnvironment
+
 import net.sansa_stack.rdf.flink.data.RDFGraphLoader
 import org.apache.flink.api.scala._
+import org.apache.flink.api.scala.ExecutionEnvironment
+
 
 object TripleOps {
   def main(args: Array[String]) {
@@ -25,16 +27,16 @@ object TripleOps {
     val rdfgraph = RDFGraphLoader.loadFromFile(input, env)
 
     rdfgraph.triples.collect().take(4).foreach(println(_))
-    //Triples filtered by subject ( "http://dbpedia.org/resource/Charles_Dickens" )
+    // Triples filtered by subject ( "http://dbpedia.org/resource/Charles_Dickens" )
     println("All triples related to Dickens:\n" + rdfgraph.find(Some("http://commons.dbpedia.org/resource/Category:Places"), None, None).collect().mkString("\n"))
 
-    //Triples filtered by predicate ( "http://dbpedia.org/ontology/influenced" )
+    // Triples filtered by predicate ( "http://dbpedia.org/ontology/influenced" )
     println("All triples for predicate influenced:\n" + rdfgraph.find(None, Some("http://dbpedia.org/ontology/influenced"), None).collect().mkString("\n"))
 
-    //Triples filtered by object ( <http://dbpedia.org/resource/Henry_James> )
+    // Triples filtered by object ( <http://dbpedia.org/resource/Henry_James> )
     println("All triples influenced by Henry_James:\n" + rdfgraph.find(None, None, Some("<http://dbpedia.org/resource/Henry_James>")).collect().mkString("\n"))
 
-    //println("Number of triples: " + rdfgraph.triples.distinct.count())
+    // println("Number of triples: " + rdfgraph.triples.distinct.count())
     println("Number of subjects: " + rdfgraph.getSubjects.map(_.toString).distinct().count)
     println("Number of predicates: " + rdfgraph.getPredicates.map(_.toString).distinct.count())
     println("Number of objects: " + rdfgraph.getPredicates.map(_.toString).distinct.count())
@@ -52,4 +54,4 @@ object TripleOps {
     help("help").text("prints this usage text")
   }
 
-}
+}

sansa-examples-flink/src/main/scala/net/sansa_stack/examples/flink/rdf/TripleReader.scala

Lines changed: 4 additions & 2 deletions
@@ -1,8 +1,10 @@
 package net.sansa_stack.examples.flink.rdf
 
 import scala.collection.mutable
-import org.apache.flink.api.scala.ExecutionEnvironment
+
 import net.sansa_stack.rdf.flink.data.RDFGraphLoader
+import org.apache.flink.api.scala.ExecutionEnvironment
+
 
 object TripleReader {
 
@@ -40,4 +42,4 @@ object TripleReader {
 
     help("help").text("prints this usage text")
   }
-}
+}

sansa-examples-flink/src/main/scala/net/sansa_stack/examples/flink/rdf/TripleWriter.scala

Lines changed: 4 additions & 2 deletions
@@ -1,7 +1,9 @@
 package net.sansa_stack.examples.flink.rdf
+
 import scala.collection.mutable
-import org.apache.flink.api.scala.ExecutionEnvironment
+
 import net.sansa_stack.rdf.flink.data.{ RDFGraphLoader, RDFGraphWriter }
+import org.apache.flink.api.scala.ExecutionEnvironment
 
 object TripleWriter {
 
@@ -30,7 +32,7 @@ object TripleWriter {
   }
 
   case class Config(
-    in: String = "",
+    in: String = "",
     out: String = "")
 
   val parser = new scopt.OptionParser[Config]("Triple writer example ") {
