This repository was archived by the owner on Oct 8, 2020. It is now read-only.

Commit d971aa0

Removed a compiler error and cleaned up code
1 parent a6ddf00 commit d971aa0

2 files changed: 9 additions & 9 deletions


sansa-examples-spark/src/main/scala/net/sansa_stack/examples/spark/ml/mining/MineRules.scala

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ object MineRules {
     know.sethdfsPath(hdfsPath)
     know.setKbSrc(input)
 
-    know.setKbGraph(RDFGraphLoader.loadFromFile(know.getKbSrc(), spark.sparkContext, 2))
+    know.setKbGraph(RDFGraphLoader.loadFromFile(input, spark.sparkContext, 2))
     know.setDFTable(DfLoader.loadFromFileDF(know.getKbSrc, spark.sparkContext, spark.sqlContext, 2))
 
     val algo = new Algorithm(know, 0.01, 3, 0.1, hdfsPath)
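
Why this removed a compiler error: the deleted line invoked know.getKbSrc() with parentheses, while the surviving DfLoader line calls know.getKbSrc without them. That suggests the getter is declared as a parameterless (no-parens) method, which Scala does not allow to be applied with (). A minimal sketch of that rule, using a hypothetical Knowledge stand-in rather than the actual SANSA class:

// Hypothetical stand-in class, only to illustrate the Scala no-parens rule
// that the removed line most likely tripped over.
class Knowledge {
  private var kbSrc: String = ""
  def setKbSrc(src: String): Unit = { kbSrc = src }
  def getKbSrc: String = kbSrc // declared without parentheses
}

object NoParensDemo extends App {
  val know = new Knowledge
  know.setKbSrc("data.nt")
  println(know.getKbSrc)      // compiles: the call matches the declaration
  // println(know.getKbSrc()) // does not compile: getKbSrc takes no parameters
}

Passing input directly also drops a redundant round-trip through the setter/getter pair, since setKbSrc(input) is called two lines earlier.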

sansa-examples-spark/src/main/scala/net/sansa_stack/examples/spark/query/Sparqlify.scala

Lines changed: 8 additions & 8 deletions
@@ -43,17 +43,17 @@ object Sparqlify {
     val lang = Lang.NTRIPLES
     val graphRdd = spark.rdf(lang)(input)
 
-    val checkendpoint = endpoint match {
-      case j if(endpoint) =>
+    endpoint match {
+      case j if endpoint =>
         val partitions = RdfPartitionUtilsSpark.partitionGraph(graphRdd)
         val rewriter = SparqlifyUtils3.createSparqlSqlRewriter(spark, partitions)
 
         val port = 7531
 
         val qef = new QueryExecutionFactorySparqlifySpark(spark, rewriter)
         val server = FactoryBeanSparqlServer.newInstance.setSparqlServiceFactory(qef).setPort(port).create()
-        if (Desktop.isDesktopSupported()) {
-          Desktop.getDesktop().browse(new URI("http://localhost:" + port + "/sparql"));
+        if (Desktop.isDesktopSupported) {
+          Desktop.getDesktop.browse(URI.create("http://localhost:" + port + "/sparql"))
         }
         server.join()
       case _ =>
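
The match and Desktop edits above are cleanup rather than behavior changes: the unused checkendpoint binding goes away, the guard case j if endpoint merely sheds redundant parentheses, the empty Scala parens on java.awt.Desktop's parameterless accessors are dropped, URI.create replaces the new URI constructor, and a stray Java-style semicolon disappears. A self-contained sketch of the resulting idiom, with the port number taken from the diff:

import java.awt.Desktop
import java.net.URI

object OpenEndpointSketch extends App {
  val port = 7531
  // Desktop is unavailable on headless JVMs, hence the support check.
  if (Desktop.isDesktopSupported) {
    Desktop.getDesktop.browse(URI.create("http://localhost:" + port + "/sparql"))
  }
}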
@@ -77,11 +77,11 @@ object Sparqlify {
       action((x, c) => c.copy(in = x)).
       text("path to file that contains the data (in N-Triples format)")
 
-      opt[String]('q', "sparql").optional().valueName("query").
+      opt[String]('q', "sparql").optional().valueName("<query>").
       action((x, c) => c.copy(sparql = x)).
       text("a SPARQL query")
 
-      opt[Boolean]('e', "endpoint").optional().valueName("SPARQL endoint enabled").
+      opt[Boolean]('e', "endpoint").optional().valueName("SPARQL endpoint enabled").
       action((x, c) => c.copy(endpoint = x)).
       text("enable SPARQL endpoint , default:'enabled'")
@@ -90,11 +90,11 @@ object Sparqlify {
       text("port that SPARQL endpoint will be exposed, default:'7531'")
 
       checkConfig(c =>
-        if (c.endpoint == false && c.sparql.isEmpty) failure("Option --sparql must not be empty if endpoint is disabled")
+        if (!c.endpoint && c.sparql.isEmpty) failure("Option --sparql must not be empty if endpoint is disabled")
         else success)
 
       checkConfig(c =>
-        if (c.endpoint == true && c.port.isEmpty) failure("Option --port ust not be empty if endpoint is enabled")
+        if (c.endpoint && c.port.isEmpty) failure("Option --port ust not be empty if endpoint is enabled")
         else success)
 
       help("help").text("prints this usage text")
