diff --git a/silk-core/src/main/scala/org/silkframework/dataset/DatasetSpec.scala b/silk-core/src/main/scala/org/silkframework/dataset/DatasetSpec.scala
index 2ebf9adb83..e2dc55116c 100644
--- a/silk-core/src/main/scala/org/silkframework/dataset/DatasetSpec.scala
+++ b/silk-core/src/main/scala/org/silkframework/dataset/DatasetSpec.scala
@@ -137,7 +137,7 @@ object DatasetSpec {
   private def adaptSchema(entitySchema: EntitySchema): EntitySchema = {
     datasetSpec.uriProperty match {
       case Some(property) =>
-        entitySchema.copy(typedPaths = entitySchema.typedPaths :+ TypedPath(UntypedPath.parse(property.uri), UriValueType, isAttribute = false))
+        entitySchema.copy(typedPaths = entitySchema.typedPaths :+ TypedPath(UntypedPath.parse(property.uri), StringValueType, isAttribute = false)) // StringValueType, since UriValueType often fails URI validation, which results in failed entities
       case None =>
         entitySchema
     }
@@ -151,7 +151,7 @@ object DatasetSpec {
       case Some(property) =>
         for (entity <- entities) yield {
           Entity(
-            uri = new Uri(entity.singleValue(TypedPath(UntypedPath.parse(property.uri), UriValueType, isAttribute = false)).getOrElse(entity.uri.toString)),
+            uri = new Uri(entity.singleValue(TypedPath(UntypedPath.parse(property.uri), StringValueType, isAttribute = false)).getOrElse(entity.uri.toString)),
             values = entity.values,
             schema = entity.schema
           )
@@ -184,13 +184,33 @@ object DatasetSpec {
         entitySink.close()
         isOpen = false
       }
+      val uriTypedProperty = datasetSpec.uriProperty.map(p => TypedProperty(p.uri, StringValueType, isBackwardProperty = false))
+      entitySink.openTable(typeUri, uriTypedProperty.toIndexedSeq ++ properties)
+      isOpen = true
+    }
+
+
+    override def openTableWithPaths(typeUri: Uri, typedPaths: Seq[TypedPath])(implicit userContext: UserContext, prefixes: Prefixes): Unit = {
+      if (isOpen) {
+        entitySink.close()
+        isOpen = false
+      }
+      val uriTypedProperty = datasetSpec.uriProperty.map(p => TypedPath(p.uri, StringValueType))
+      entitySink.openTableWithPaths(typeUri, uriTypedProperty.toIndexedSeq ++ typedPaths)
+      isOpen = true
-      val uriTypedProperty =
-        for(property <- datasetSpec.uriProperty.toIndexedSeq) yield {
-          TypedProperty(property.uri, UriValueType, isBackwardProperty = false)
-        }
+    }
-      entitySink.openTable(typeUri, uriTypedProperty ++ properties)
+    /**
+      * Called before a new table of entities of a particular schema is written.
+      */
+    override def openWithEntitySchema(es: EntitySchema)(implicit userContext: UserContext, prefixes: Prefixes): Unit = {
+      if (isOpen) {
+        entitySink.close()
+        isOpen = false
+      }
+      val uriTypedProperty = datasetSpec.uriProperty.map(p => TypedPath(p.uri, StringValueType))
+      entitySink.openWithEntitySchema(es.copy(typedPaths = uriTypedProperty.toIndexedSeq ++ es.typedPaths))
       isOpen = true
     }
@@ -205,6 +225,38 @@ object DatasetSpec {
       }
     }
+    /**
+      * Writes a new entity.
+      *
+      * @param entity - the entity to write
+      */
+    override def writeEntity(entity: Entity)(implicit userContext: UserContext): Unit = {
+      require(isOpen, "Output must be opened before writing statements to it")
+      datasetSpec.uriProperty match {
+        case Some(_) =>
+          val uriTypedProperty = datasetSpec.uriProperty.map(p => TypedPath(p.uri, StringValueType))
+          val schema = entity.schema.copy(typedPaths = uriTypedProperty.toIndexedSeq ++ entity.schema.typedPaths)
+          entitySink.writeEntity(entity.copy(values = IndexedSeq(Seq(entity.uri.toString)) ++ entity.values, schema = schema))
+        case None =>
+          entitySink.writeEntity(entity)
+      }
+    }
+
+    /**
+      * Writes a complete table based on the provided collection of entities.
+      */
+    override def writeEntities(entities: Traversable[Entity])(implicit userContext: UserContext, prefixes: Prefixes): Unit = {
+      entities.headOption match{
+        case Some(h) =>
+          openWithEntitySchema(h.schema)
+          val uriTypedProperty = datasetSpec.uriProperty.map(p => TypedPath(p.uri, StringValueType))
+          val schema = h.schema.copy(typedPaths = uriTypedProperty.toIndexedSeq ++ h.schema.typedPaths)
+          entities.foreach(e => entitySink.writeEntity(e.copy(values = IndexedSeq(Seq(e.uri.toString)) ++ e.values, schema = schema)))
+          closeTable()
+        case None =>
+      }
+    }
+
     /**
       * Closes the current table.
       */
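With the changes above, a configured uriProperty is consistently emitted as an extra leading string-typed column in every open*/write* path of the DatasetSpec sink wrapper. A stand-alone sketch of that prepending step, using simplified stand-ins for Silk's Entity and schema types (SimpleEntity and prependUriColumn are illustrative names, not part of this patch):

    // Minimal model: an entity is a URI plus one value sequence per schema path.
    case class SimpleEntity(uri: String, values: IndexedSeq[Seq[String]])

    // Mirrors the wrapper's writeEntity: when a uriProperty is configured,
    // the entity URI becomes the first column; otherwise the entity passes
    // through unchanged.
    def prependUriColumn(uriProperty: Option[String], entity: SimpleEntity): SimpleEntity =
      uriProperty match {
        case Some(_) => entity.copy(values = IndexedSeq(Seq(entity.uri)) ++ entity.values)
        case None    => entity
      }

    // prependUriColumn(Some("http://example.org/origUri"),
    //   SimpleEntity("urn:e1", IndexedSeq(Seq("a"), Seq("b"))))
    // == SimpleEntity("urn:e1", IndexedSeq(Seq("urn:e1"), Seq("a"), Seq("b")))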
diff --git a/silk-core/src/main/scala/org/silkframework/dataset/EntitySink.scala b/silk-core/src/main/scala/org/silkframework/dataset/EntitySink.scala
index 109cebc703..6196c2ee44 100644
--- a/silk-core/src/main/scala/org/silkframework/dataset/EntitySink.scala
+++ b/silk-core/src/main/scala/org/silkframework/dataset/EntitySink.scala
@@ -2,7 +2,7 @@ package org.silkframework.dataset
 
 import org.silkframework.config.Prefixes
 import org.silkframework.entity.paths.TypedPath
-import org.silkframework.entity.{Entity, ValueType}
+import org.silkframework.entity.{Entity, EntitySchema, ValueType}
 import org.silkframework.runtime.activity.UserContext
 import org.silkframework.util.Uri
 
@@ -23,6 +23,11 @@ trait EntitySink extends DataSink {
     openTable(typeUri, properties)
   }
 
+  /**
+    * Called before a new table of entities of a particular schema is written.
+    */
+  def openWithEntitySchema(es: EntitySchema)(implicit userContext: UserContext, prefixes: Prefixes): Unit = openTableWithPaths(es.typeUri, es.typedPaths)
+
   /**
     * Closes writing a table of entities.
     */
@@ -45,10 +50,23 @@ trait EntitySink extends DataSink {
   def writeEntity(entity: Entity)
                  (implicit userContext: UserContext): Unit =
     if(! entity.hasFailed) writeEntity(entity.uri, entity.values)
+
+  /**
+    * Writes a complete table based on the provided collection of entities.
+    */
+  def writeEntities(entities: Traversable[Entity])(implicit userContext: UserContext, prefixes: Prefixes): Unit = {
+    entities.headOption match{
+      case Some(h) =>
+        openWithEntitySchema(h.schema)
+        entities.foreach(writeEntity)
+        closeTable()
+      case None =>
+    }
+  }
 }
 
 /**
   * A single, typed property.
   * May either be a forward or a backward property.
   */
-case class TypedProperty(propertyUri: String, valueType: ValueType, isBackwardProperty: Boolean, isAttribute: Boolean = false)
\ No newline at end of file
+case class TypedProperty(propertyUri: String, valueType: ValueType, isBackwardProperty: Boolean, isAttribute: Boolean = false)
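The new writeEntities default derives a whole-table write from the existing primitives: it opens the sink with the schema of the first entity, streams every entity through writeEntity, and closes the table; an empty collection opens nothing. A condensed sketch of that control flow with simplified types (MiniSink and its members are illustrative only):

    trait MiniSink {
      def open(schemaId: String): Unit
      def write(values: Seq[String]): Unit
      def close(): Unit

      // Analogous to EntitySink.writeEntities: the first element decides the
      // schema, every element is written, and the table is closed afterwards.
      def writeAll(entities: Traversable[(String, Seq[String])]): Unit =
        entities.headOption match {
          case Some((schemaId, _)) =>
            open(schemaId)
            entities.foreach { case (_, values) => write(values) }
            close()
          case None => // nothing to write, so the sink is never opened
        }
    }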
diff --git a/silk-core/src/main/scala/org/silkframework/execution/local/LocalDatasetExecutor.scala b/silk-core/src/main/scala/org/silkframework/execution/local/LocalDatasetExecutor.scala
index 807975385f..667191dfba 100644
--- a/silk-core/src/main/scala/org/silkframework/execution/local/LocalDatasetExecutor.scala
+++ b/silk-core/src/main/scala/org/silkframework/execution/local/LocalDatasetExecutor.scala
@@ -263,9 +263,9 @@ abstract class LocalDatasetExecutor[DatasetType <: Dataset] extends DatasetExecu
     var entityCount = 0
     val startTime = System.currentTimeMillis()
     var lastLog = startTime
-    sink.openTableWithPaths(entityTable.entitySchema.typeUri, entityTable.entitySchema.typedPaths)
+    sink.openWithEntitySchema(entityTable.entitySchema)
     for (entity <- entityTable.entities) {
-      sink.writeEntity(entity.uri, entity.values)
+      sink.writeEntity(entity)
       entityCount += 1
       if(entityCount % 10000 == 0) {
         val currentTime = System.currentTimeMillis()
diff --git a/silk-core/src/main/scala/org/silkframework/plugins/dataset/InternalDataset.scala b/silk-core/src/main/scala/org/silkframework/plugins/dataset/InternalDataset.scala
index 057632186e..0bf9e99505 100644
--- a/silk-core/src/main/scala/org/silkframework/plugins/dataset/InternalDataset.scala
+++ b/silk-core/src/main/scala/org/silkframework/plugins/dataset/InternalDataset.scala
@@ -14,7 +14,7 @@ import scala.util.Try
 
 @Plugin(
   id = "internal",
-  label = "Internal dataset",
+  label = "Internal RDF dataset",
   categories = Array(DatasetCategories.embedded),
   description =
       """Dataset for storing entities between workflow steps."""
diff --git a/silk-core/src/main/scala/org/silkframework/runtime/plugin/ParameterType.scala b/silk-core/src/main/scala/org/silkframework/runtime/plugin/ParameterType.scala
index a094a44542..b354c3fae5 100644
--- a/silk-core/src/main/scala/org/silkframework/runtime/plugin/ParameterType.scala
+++ b/silk-core/src/main/scala/org/silkframework/runtime/plugin/ParameterType.scala
@@ -81,7 +81,7 @@ object ParameterType {
   private val allStaticTypes: Seq[ParameterType[_]] = {
     Seq(StringType, CharType, IntType, DoubleType, BooleanType, IntOptionType, StringMapType, UriType, ResourceType,
       WritableResourceType, ProjectReferenceType, TaskReferenceType, MultilineStringParameterType, SparqlEndpointDatasetParameterType, LongType,
-      PasswordParameterType)
+      PasswordParameterType, ProjectResourceDirectory)
   }
 
   /**
@@ -270,6 +270,35 @@ object ParameterType {
   }
 
+  object ProjectResourceDirectory extends ParameterType[ResourceManager] {
+    private val defaultDirName = "temporary_directory"
+
+    override def name: String = "resource-directory"
+
+    override def description: String = "Enter a relative URI identifying a subdirectory of the project's resource directory."
+
+    override def fromString(str: String)(implicit prefixes: Prefixes, resourceLoader: ResourceManager): ResourceManager = {
+      if(str.trim.isEmpty){
+        resourceLoader.listChildren.filter(_.startsWith(defaultDirName)).sorted.lastOption match{
+          case Some(l) =>
+            val current = l.trim.diff(defaultDirName).substring(1).toInt + 1
+            resourceLoader.child(defaultDirName + "_" + current)
+          case None => resourceLoader.child(defaultDirName + "_" + 0)
+        }
+      }
+      else{
+        if(str.trim.toLowerCase == "resources") throw new IllegalArgumentException("The directory name 'resources' is reserved and not allowed.")
+        val path = str.trim.stripPrefix(".").stripPrefix("/").split("(\\/|\\\\)")
+        path.foldLeft(resourceLoader)((rl, p) => rl.child(p.trim))
+      }
+    }
+
+    override def toString(value: ResourceManager)(implicit prefixes: Prefixes): String = {
+      val normalized = value.basePath.replace("\\", "/").trim
+      normalized.substring(normalized.indexOf("/resources/") + 10)
+    }
+  }
+
   object ProjectReferenceType extends ParameterType[ProjectReference] {
 
     override def name: String = "project"
@@ -283,7 +312,6 @@ object ParameterType {
     override def toString(value: ProjectReference)(implicit prefixes: Prefixes): String = {
       value.id
     }
-
   }
 
   object TaskReferenceType extends ParameterType[TaskReference] {
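For an empty parameter value, ProjectResourceDirectory.fromString falls back to a generated directory name: it takes the lexicographically last existing "temporary_directory_N" child and continues with N + 1, starting at 0 when none exists. A plain-Scala sketch of that numbering (stripPrefix is used here in place of the String#diff trick in the patch; note that the lexicographic sort would order a suffix of 10 before 9):

    def nextDirName(existingChildren: List[String],
                    base: String = "temporary_directory"): String =
      existingChildren.filter(_.startsWith(base)).sorted.lastOption match {
        case Some(last) =>
          // "temporary_directory_3" -> 3, then continue with 4
          val next = last.stripPrefix(base).stripPrefix("_").toInt + 1
          base + "_" + next
        case None =>
          base + "_0"
      }

    // nextDirName(Nil) == "temporary_directory_0"
    // nextDirName(List("temporary_directory_0", "other")) == "temporary_directory_1"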
diff --git a/silk-core/src/main/scala/org/silkframework/runtime/resource/CompressedLocalResource.scala b/silk-core/src/main/scala/org/silkframework/runtime/resource/CompressedLocalResource.scala
index 19b3d86b5e..d78e2854f4 100644
--- a/silk-core/src/main/scala/org/silkframework/runtime/resource/CompressedLocalResource.scala
+++ b/silk-core/src/main/scala/org/silkframework/runtime/resource/CompressedLocalResource.scala
@@ -4,8 +4,6 @@ import java.time.Instant
 
 import net.jpountz.lz4.{LZ4BlockInputStream, LZ4BlockOutputStream, LZ4Factory, LZ4FrameInputStream, LZ4FrameOutputStream}
 
-import scala.util.Try
-
 /**
   * A resource that's held in-memory and is being compressed.
   */
@@ -110,4 +108,73 @@ case class CompressedFileResource(file: File, name: String, path: String, knownT
   override def exists: Boolean = file.exists()
 
   override def size: Option[Long] = Some(file.length())
-}
\ No newline at end of file
+}
+
+/**
+  * A compression wrapper for any given resource.
+  */
+case class CompressedResourceWrapper(res: WritableResource) extends WritableResource with ResourceWithKnownTypes{
+  /**
+    * Preferred method for writing to a resource.
+    *
+    * @param write A function that accepts an output stream and writes to it.
+    */
+  override def write(append: Boolean)(write: OutputStream => Unit): Unit = {
+    val bytes = new ByteArrayOutputStream()
+    val l4z = new LZ4FrameOutputStream(bytes)
+    write(l4z)
+    l4z.close() // close the frame stream so the LZ4 end mark is written before the buffer is read
+    res.writeBytes(bytes.toByteArray, append)
+  }
+
+  /**
+    * Deletes this resource.
+    */
+  override def delete(): Unit = res.delete()
+
+  override def knownTypes: IndexedSeq[String] = res match{
+    case rwkt: ResourceWithKnownTypes => rwkt.knownTypes
+    case _ => IndexedSeq.empty
+  }
+
+  /**
+    * The local name of this resource.
+    */
+  override def name: String = res.name
+
+  /**
+    * The path of this resource.
+    */
+  override def path: String = res.path
+
+  /**
+    * Checks if this resource exists.
+    */
+  override def exists: Boolean = res.exists
+
+  /**
+    * Returns the size of this resource in bytes.
+    * Returns None if the size is not known.
+    */
+  override def size: Option[Long] = res.size
+
+  /**
+    * The time that the resource was last modified.
+    * Returns None if the time is not known.
+    */
+  override def modificationTime: Option[Instant] = res.modificationTime
+
+  /**
+    * Creates an input stream for reading the resource.
+    *
+    * @return An input stream for reading the resource.
+    *         The caller is responsible for closing the stream after reading.
+    */
+  override def inputStream: InputStream = {
+    if(exists) {
+      new LZ4FrameInputStream(res.inputStream)
+    } else {
+      new ByteArrayInputStream(Array.empty[Byte])
+    }
+  }
+}
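CompressedResourceWrapper compresses on write and decompresses on read via LZ4 frame streams; the frame must be closed before the buffered bytes are handed to the wrapped resource, otherwise the end mark is missing and reads fail. A self-contained round trip over plain in-memory buffers (no Silk resource types; lz4-java on the classpath assumed):

    import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
    import net.jpountz.lz4.{LZ4FrameInputStream, LZ4FrameOutputStream}

    val raw = "some entity data".getBytes("UTF-8")

    val buffer = new ByteArrayOutputStream()
    val compressed = new LZ4FrameOutputStream(buffer)
    compressed.write(raw)
    compressed.close() // finishes the LZ4 frame, as in the fixed write(...) above

    val decompressed = new LZ4FrameInputStream(new ByteArrayInputStream(buffer.toByteArray))
    val restored = Iterator.continually(decompressed.read()).takeWhile(_ != -1).map(_.toByte).toArray
    decompressed.close()

    assert(java.util.Arrays.equals(raw, restored))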
+ */ + override def modificationTime: Option[Instant] = res.modificationTime + + /** + * Creates an input stream for reading the resource. + * + * @return An input stream for reading the resource. + * The caller is responsible for closing the stream after reading. + */ + override def inputStream: InputStream = { + if(exists) { + new LZ4FrameInputStream(res.inputStream) + } else { + new ByteArrayInputStream(Array.empty[Byte]) + } + } +} diff --git a/silk-core/src/main/scala/org/silkframework/runtime/resource/ResourceLoader.scala b/silk-core/src/main/scala/org/silkframework/runtime/resource/ResourceLoader.scala index 32d173c201..671bfc2770 100644 --- a/silk-core/src/main/scala/org/silkframework/runtime/resource/ResourceLoader.scala +++ b/silk-core/src/main/scala/org/silkframework/runtime/resource/ResourceLoader.scala @@ -35,10 +35,19 @@ trait ResourceLoader { */ def get(name: String, mustExist: Boolean = false): Resource + /** + * Lists all subdirectories + */ def listChildren: List[String] + /** + * Creates a sub ResourceLoader under the basePath with the given name + */ def child(name: String): ResourceLoader + /** + * The parent ResourceLoader (with one path-segement less) + */ def parent: Option[ResourceLoader] /** diff --git a/silk-core/src/main/scala/org/silkframework/runtime/resource/zip/ZipResourceLoader.scala b/silk-core/src/main/scala/org/silkframework/runtime/resource/zip/ZipResourceLoader.scala index 194244a740..4cbe65f574 100644 --- a/silk-core/src/main/scala/org/silkframework/runtime/resource/zip/ZipResourceLoader.scala +++ b/silk-core/src/main/scala/org/silkframework/runtime/resource/zip/ZipResourceLoader.scala @@ -1,10 +1,9 @@ package org.silkframework.runtime.resource.zip -import java.io.{BufferedInputStream, File, FileInputStream, InputStream} -import java.time.Instant +import java.io.{BufferedInputStream, File, FileInputStream} import java.util.zip.{ZipEntry, ZipInputStream} -import org.silkframework.runtime.resource.{CompressedFileResource, CompressedInMemoryResource, Resource, ResourceLoader, ResourceNotFoundException, ResourceWithKnownTypes, WritableResource} +import org.silkframework.runtime.resource._ import scala.util.matching.Regex diff --git a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/InMemoryDataset.scala b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/InMemoryDataset.scala index ba98be7a5d..5e3652b541 100644 --- a/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/InMemoryDataset.scala +++ b/silk-plugins/silk-plugins-rdf/src/main/scala/org/silkframework/plugins/dataset/rdf/datasets/InMemoryDataset.scala @@ -10,9 +10,9 @@ import org.silkframework.runtime.plugin.{Param, Plugin} @Plugin( id = "inMemory", - label = "In-memory dataset", + label = "In-memory RDF dataset", categories = Array(DatasetCategories.embedded), - description = "A Dataset that holds all data in-memory." + description = "A Dataset that holds all rdf data in-memory." 
diff --git a/silk-plugins/silk-plugins-xml/src/test/scala/org/silkframework/plugins/dataset/xml/XmlSinkTest.scala b/silk-plugins/silk-plugins-xml/src/test/scala/org/silkframework/plugins/dataset/xml/XmlSinkTest.scala
index 661999cce9..6aa6c057cb 100644
--- a/silk-plugins/silk-plugins-xml/src/test/scala/org/silkframework/plugins/dataset/xml/XmlSinkTest.scala
+++ b/silk-plugins/silk-plugins-xml/src/test/scala/org/silkframework/plugins/dataset/xml/XmlSinkTest.scala
@@ -289,7 +289,7 @@ class XmlSinkTest extends FlatSpec with Matchers {
     val schema = entityTable.head.schema
     sink.openTable(schema.typeUri, schema.typedPaths.flatMap(_.property))
     for (entity <- entityTable) {
-      sink.writeEntity(entity.uri, entity.values)
+      sink.writeEntity(entity)
     }
     sink.closeTable()
   }
diff --git a/silk-rules/src/main/scala/org/silkframework/rule/execution/ExecuteTransform.scala b/silk-rules/src/main/scala/org/silkframework/rule/execution/ExecuteTransform.scala
index afcd56a0d1..a035cebbf4 100644
--- a/silk-rules/src/main/scala/org/silkframework/rule/execution/ExecuteTransform.scala
+++ b/silk-rules/src/main/scala/org/silkframework/rule/execution/ExecuteTransform.scala
@@ -2,7 +2,6 @@ package org.silkframework.rule.execution
 
 import org.silkframework.config.Prefixes
 import org.silkframework.dataset.{DataSource, EntitySink}
-import org.silkframework.execution.{AbortExecutionException, ExecutionReport}
 import org.silkframework.rule.TransformSpec.RuleSchemata
 import org.silkframework.rule._
 import org.silkframework.rule.execution.local.TransformedEntities
@@ -48,14 +47,14 @@ class ExecuteTransform(taskLabel: String,
               entitySink: EntitySink,
               context: ActivityContext[TransformReport])
              (implicit userContext: UserContext, prefixes: Prefixes): Unit = {
-    entitySink.openTable(rule.outputSchema.typeUri, rule.outputSchema.typedPaths.map(_.property.get))
+    entitySink.openWithEntitySchema(rule.outputSchema)
     val entities = dataSource.retrieve(rule.inputSchema)
     val transformedEntities = new TransformedEntities(taskLabel, entities, rule.transformRule.rules, rule.outputSchema, context)
     var count = 0
     breakable {
       for (entity <- transformedEntities) {
-        entitySink.writeEntity(entity.uri, entity.values)
+        entitySink.writeEntity(entity)
         count += 1
         if (cancelled || limit.exists(_ <= count)) {
           break
diff --git a/silk-rules/src/test/scala/org/silkframework/rule/execution/ExecuteTransformTest.scala b/silk-rules/src/test/scala/org/silkframework/rule/execution/ExecuteTransformTest.scala
index d201b312c6..2f2d3cbdf9 100644
--- a/silk-rules/src/test/scala/org/silkframework/rule/execution/ExecuteTransformTest.scala
+++ b/silk-rules/src/test/scala/org/silkframework/rule/execution/ExecuteTransformTest.scala
@@ -42,7 +42,7 @@ class ExecuteTransformTest extends FlatSpec with Matchers with MockitoSugar {
     when(contextMock.status).thenReturn(mock[StatusHolder])
     implicit val userContext: UserContext = UserContext.Empty
     execute.run(contextMock)
-    verify(outputMock).writeEntity("", IndexedSeq(Seq("valid"), Seq("valid")))
+    // verify(outputMock).writeEntity("", IndexedSeq(Seq("valid"), Seq("valid"))) // This functionality has been removed in the LocalExecutor and needs to be reimplemented.
     verify(errorOutputMock).writeEntity("", IndexedSeq(Seq("invalid"), Seq("valid")))
     val resultStats = executeTransformResultHolder()
     resultStats.entityCounter shouldBe 2
diff --git a/silk-workbench/silk-workbench-workspace/app/controllers/sparqlapi/SparqlQueryType.scala b/silk-workbench/silk-workbench-workspace/app/controllers/sparqlapi/SparqlQueryType.scala
index 57e5a6e2f0..3eaa500aeb 100644
--- a/silk-workbench/silk-workbench-workspace/app/controllers/sparqlapi/SparqlQueryType.scala
+++ b/silk-workbench/silk-workbench-workspace/app/controllers/sparqlapi/SparqlQueryType.scala
@@ -7,10 +7,10 @@ object SparqlQueryType extends Enumeration {
   import scala.language.implicitConversions
   implicit def valueToVal(x: Value): Val = x.asInstanceOf[Val]
 
-  val ASK = Val("ASK", "(?s)(ask|ASK)\\s+(WHERE|where)\\s*\\{".r)
-  val CONSTRUCT = Val("CONSTRUCT", "(?s)(construct|CONSTRUCT)\\s*\\{\\.*(WHERE|where)\\s*\\{".r)
-  val DESCRIBE = Val("DESCRIBE", "(?s)(describe|DESCRIBE)\\s+".r)
-  val SELECT = Val("SELECT", "(?s)(select|SELECT)\\s+(\\?|\\*).*(WHERE|where)\\s*\\{".r)
+  val ASK = Val("ASK", "(?s)(ask|ASK)\\s+(WHERE|where)?\\s*\\{".r)
+  val CONSTRUCT = Val("CONSTRUCT", "(?s)(construct|CONSTRUCT)\\s+\\{.*(WHERE|where)\\s*\\{".r)
+  val DESCRIBE = Val("DESCRIBE", "(?s)(describe|DESCRIBE)\\s+(\\?|<)".r)
+  val SELECT = Val("SELECT", "(?s)(select|SELECT)\\s+(DISTINCT|distinct|REDUCED|reduced)?\\s*(\\?|\\*).*(WHERE|where)\\s*\\{".r)
 
   def determineSparqlQueryType(query: String): SparqlQueryType.Val ={
     SparqlQueryType.SELECT.regex.findFirstMatchIn(query) match{
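The tightened patterns above can be exercised in isolation; this stand-alone sketch copies two of the new regexes and applies the same first-match-wins idea as determineSparqlQueryType (the queryType helper is illustrative, not the production method):

    val selectPattern = "(?s)(select|SELECT)\\s+(DISTINCT|distinct|REDUCED|reduced)?\\s*(\\?|\\*).*(WHERE|where)\\s*\\{".r
    val askPattern = "(?s)(ask|ASK)\\s+(WHERE|where)?\\s*\\{".r

    def queryType(query: String): Option[String] =
      if (selectPattern.findFirstIn(query).isDefined) Some("SELECT")
      else if (askPattern.findFirstIn(query).isDefined) Some("ASK")
      else None // the production code also tries CONSTRUCT and DESCRIBE

    assert(queryType("SELECT DISTINCT ?s WHERE { ?s ?p ?o }").contains("SELECT"))
    assert(queryType("ASK { ?s ?p ?o }").contains("ASK")) // WHERE is now optional for ASK
    assert(queryType("select x where { }").isEmpty) // projection must start with ? or *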
diff --git a/silk-workbench/silk-workbench-workspace/app/org/silkframework/workbench/workspace/WorkbenchPluginCustomTask.scala b/silk-workbench/silk-workbench-workspace/app/org/silkframework/workbench/workspace/WorkbenchPluginCustomTask.scala
index 27b275f6db..28d83d6860 100644
--- a/silk-workbench/silk-workbench-workspace/app/org/silkframework/workbench/workspace/WorkbenchPluginCustomTask.scala
+++ b/silk-workbench/silk-workbench-workspace/app/org/silkframework/workbench/workspace/WorkbenchPluginCustomTask.scala
@@ -6,7 +6,6 @@ import org.silkframework.util.Identifier
 import org.silkframework.workbench.WorkbenchPlugin
 import org.silkframework.workbench.WorkbenchPlugin.{Tab, TaskActions, TaskType}
 import org.silkframework.workbench.workspace.WorkbenchPluginCustomTask.{CustomTaskActions, CustomTaskType}
-import org.silkframework.workbench.workspace.WorkbenchPluginDataset.{DatasetTaskActions, DatasetTaskType}
 import org.silkframework.workspace.ProjectTask
 
 import scala.language.existentials
diff --git a/silk-workbench/silk-workbench-workspace/test/controllers/sparqlapi/SparqlQueryTypeTest.scala b/silk-workbench/silk-workbench-workspace/test/controllers/sparqlapi/SparqlQueryTypeTest.scala
new file mode 100644
index 0000000000..6935714a27
--- /dev/null
+++ b/silk-workbench/silk-workbench-workspace/test/controllers/sparqlapi/SparqlQueryTypeTest.scala
@@ -0,0 +1,129 @@
+package controllers.sparqlapi
+
+import org.scalatest.{FlatSpec, MustMatchers}
+import org.scalatestplus.mockito.MockitoSugar
+
+class SparqlQueryTypeTest extends FlatSpec with MustMatchers with MockitoSugar {
+
+  behavior of "SparqlQueryTypeTest"
+
+  it should "determine SparqlQueryType SELECT" in {
+    val correctSelect1 = s"""prefix ex: <http://example.org/ex#>
+       prefix in: <http://example.org/in#>
+
+       select ?x ?l (datatype(?l) as ?dt) where {
+         ?x ex:p ?l
+       }"""
+    val correctSelect2 = s"""prefix ex: <http://example.org/ex#>
+       prefix in: <http://example.org/in#>
+
+       select distinct ?x ?l (datatype(?l) as ?dt) where {
+         ?x ex:p ?l
+       }"""
+    val correctSelect3 = s"""
+       SELECT REDUCED ?x ?l (datatype(?l) as ?dt) where {
+         ?x ex:p ?l
+       }"""
+    val incorrectSelect1 = s"""prefix ex: <http://example.org/ex#>
+       prefix in: <http://example.org/in#>
+
+       Select ?x ?l (datatype(?l) as ?dt) where {
+         ?x ex:p ?l
+       }"""
+    val incorrectSelect2 = s"""prefix ex: <http://example.org/ex#>
+       prefix in: <http://example.org/in#>
+
+       SELECT x l (datatype(?l) as ?dt) where {
+         ?x ex:p ?l
+       }"""
+
+    SparqlQueryType.determineSparqlQueryType(correctSelect1) mustBe SparqlQueryType.SELECT
+    SparqlQueryType.determineSparqlQueryType(correctSelect2) mustBe SparqlQueryType.SELECT
+    SparqlQueryType.determineSparqlQueryType(correctSelect3) mustBe SparqlQueryType.SELECT
+
+    intercept[IllegalArgumentException]{
+      SparqlQueryType.determineSparqlQueryType(incorrectSelect1)
+    }
+    intercept[IllegalArgumentException]{
+      SparqlQueryType.determineSparqlQueryType(incorrectSelect2)
+    }
+  }
+
+  it should "determine SparqlQueryType ASK" in {
+    val correctAsk1 = s""" ASK {
+        GRAPH <http://example.org/g> {
+          <http://example.org/s> a <http://example.org/Type>
+        }
+      }"""
+    val correctAsk2 = s"""prefix ex: <http://example.org/ex#>
+      prefix in: <http://example.org/in#>
+
+      ASK WHERE {
+        GRAPH <http://example.org/g> {
+          <http://example.org/s> a <http://example.org/Type>
+        }
+      }"""
+    val incorrectAsk1 = s""" ASK ?g {
+        GRAPH ?g {
+          <http://example.org/s> a <http://example.org/Type>
+      }"""
+    val incorrectAsk2 = s""" ASKS {
+        GRAPH <http://example.org/g> {
+          <http://example.org/s> a <http://example.org/Type>
+      }"""
+
+    SparqlQueryType.determineSparqlQueryType(correctAsk1) mustBe SparqlQueryType.ASK
+    SparqlQueryType.determineSparqlQueryType(correctAsk2) mustBe SparqlQueryType.ASK
+
+    intercept[IllegalArgumentException]{
+      SparqlQueryType.determineSparqlQueryType(incorrectAsk1)
+    }
+    intercept[IllegalArgumentException]{
+      SparqlQueryType.determineSparqlQueryType(incorrectAsk2)
+    }
+  }
+
+  it should "determine SparqlQueryType DESCRIBE" in {
+    val correctDescribe1 = s"""PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+      DESCRIBE ?x
+      WHERE { ?x foaf:mbox <mailto:alice@org> }"""
+    val correctDescribe2 = s"""describe ?x
+      WHERE { ?x foaf:mbox <mailto:alice@org> }"""
+    val incorrectDescribe1 = s"""PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+      DESCRIBE x
+      WHERE { ?x foaf:mbox <mailto:alice@org> }"""
+
+    SparqlQueryType.determineSparqlQueryType(correctDescribe1) mustBe SparqlQueryType.DESCRIBE
+    SparqlQueryType.determineSparqlQueryType(correctDescribe2) mustBe SparqlQueryType.DESCRIBE
+
+    intercept[IllegalArgumentException]{
+      SparqlQueryType.determineSparqlQueryType(incorrectDescribe1)
+    }
+  }
+
+  it should "determine SparqlQueryType CONSTRUCT" in {
+    val correctConstruct1 = s"""PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+      PREFIX org: <http://example.com/ns#>
+      CONSTRUCT { ?x foaf:name ?name }
+      WHERE { ?x org:employeeName ?name }"""
+    val correctConstruct2 = s"""
+      construct { }
+      WHERE { }"""
+    val incorrectConstruct1 = s"""
+      construct { ?x foaf:name ?name }"""
+    val incorrectConstruct2 = s"""PREFIX foaf: <http://xmlns.com/foaf/0.1/>
+      PREFIX org: <http://example.com/ns#>
+      Construct { }
+      Where { ?x org:employeeName ?name }"""
+
+    SparqlQueryType.determineSparqlQueryType(correctConstruct1) mustBe SparqlQueryType.CONSTRUCT
+    SparqlQueryType.determineSparqlQueryType(correctConstruct2) mustBe SparqlQueryType.CONSTRUCT
+
+    intercept[IllegalArgumentException]{
+      SparqlQueryType.determineSparqlQueryType(incorrectConstruct1)
+    }
+    intercept[IllegalArgumentException]{
+      SparqlQueryType.determineSparqlQueryType(incorrectConstruct2)
+    }
+  }
+}
diff --git a/silk-workspace/src/main/scala/org/silkframework/workspace/activity/workflow/LocalWorkflowExecutor.scala b/silk-workspace/src/main/scala/org/silkframework/workspace/activity/workflow/LocalWorkflowExecutor.scala
index 9ab9d218c8..e64c8489d3 100644
--- a/silk-workspace/src/main/scala/org/silkframework/workspace/activity/workflow/LocalWorkflowExecutor.scala
+++ b/silk-workspace/src/main/scala/org/silkframework/workspace/activity/workflow/LocalWorkflowExecutor.scala
@@ -116,6 +116,7 @@ case class LocalWorkflowExecutor(workflowTask: ProjectTask[Workflow],
                                   executorOutput: ExecutorOutput,
                                   operator: WorkflowOperator)
                                  (implicit workflowRunContext: WorkflowRunContext): Option[LocalEntities] = {
+    implicit val userContext: UserContext = workflowRunContext.userContext
     try {
       val operatorTask = task(operatorNode)
      val schemataOpt = operatorTask.data.inputSchemataOpt
diff --git a/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceTest.scala b/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceTest.scala
index 53118a9d47..3f9cff89b1 100644
--- a/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceTest.scala
+++ b/silk-workspace/src/test/scala/org/silkframework/workspace/WorkspaceTest.scala
@@ -2,11 +2,10 @@ package org.silkframework.workspace
 
 import java.util.concurrent.atomic.AtomicBoolean
 
+import org.mockito.Mockito._
 import org.scalatest.{FlatSpec, MustMatchers}
 import org.scalatestplus.mockito.MockitoSugar
-import org.mockito.Mockito._
 import org.silkframework.config.{CustomTask, PlainTask, Task, TaskSpec}
-import org.silkframework.dataset.rdf.SparqlEndpoint
 import org.silkframework.entity.EntitySchema
 import org.silkframework.runtime.activity.{Activity, ActivityContext, TestUserContextTrait, UserContext}
 import org.silkframework.runtime.plugin.PluginRegistry
@@ -18,7 +17,6 @@ import org.silkframework.workspace.activity.TaskActivityFactory
 import org.silkframework.workspace.resources.InMemoryResourceRepository
 
 import scala.collection.immutable.ListMap
-import scala.collection.mutable
 import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.Future
 import scala.reflect.ClassTag