102 changes: 55 additions & 47 deletions src/main/scala/com/rawlabs/das/sqlite/DASSqliteTable.scala
@@ -47,9 +47,11 @@ class DASSqliteTable(backend: DASSqliteBackend, defn: TableDefinition, maybePrim
*/
override def tableEstimate(quals: Seq[Qual], columns: Seq[String]): TableEstimate = {
// 1) Build the same WHERE clause used in `execute(...)`.
val supportedQuals = quals.flatMap(qualToSql)

val whereClause =
if (quals.isEmpty) ""
else "\nWHERE " + quals.map(qualToSql).mkString(" AND ")
if (supportedQuals.isEmpty) ""
else "\nWHERE " + supportedQuals.mkString(" AND ")

// 2) Possibly use columns if you want to estimate only the subset of columns,
// or just use "*" or "1" to get an overall row count approximation.
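The pattern this hunk introduces (and the `execute(...)` hunk below mirrors) is: translate each qual to an optional SQL snippet and build the WHERE clause only from the quals that translated, silently dropping the rest. A self-contained sketch of that pattern, using simplified stand-in types rather than the actual protocol `Qual`:

object SupportedQualsSketch {

  // Stand-in for a qualifier: column name, operator, and an already-rendered SQL literal.
  final case class Qual(column: String, op: String, literal: String)

  // Some(snippet) for operators we can push down to SQLite, None otherwise.
  def qualToSql(q: Qual): Option[String] = q.op match {
    case "=" | "<>" | "<" | "<=" | ">" | ">=" | "LIKE" => Some(s"${q.column} ${q.op} ${q.literal}")
    case _                                             => None // e.g. ILIKE is not pushed down
  }

  // Mirrors the change above: unsupported quals simply drop out of the WHERE clause.
  def whereClause(quals: Seq[Qual]): String = {
    val supportedQuals = quals.flatMap(qualToSql)
    if (supportedQuals.isEmpty) "" else "\nWHERE " + supportedQuals.mkString(" AND ")
  }

  def main(args: Array[String]): Unit = {
    val quals = Seq(Qual("\"NAME\"", "ILIKE", "'PAUL'"), Qual("\"AGE\"", ">", "25"))
    // Prints (after a leading newline): WHERE "AGE" > 25 -- the ILIKE qual was dropped.
    println(whereClause(quals))
  }
}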
@@ -210,9 +212,10 @@ class DASSqliteTable(backend: DASSqliteBackend, defn: TableDefinition, maybePrim
else columns.map(quoteIdentifier).mkString(", ")

// Build WHERE from `quals`
val supportedQuals = quals.flatMap(qualToSql)
val whereClause =
if (quals.isEmpty) ""
else "\nWHERE " + quals.map(qualToSql).mkString(" AND ")
if (supportedQuals.isEmpty) ""
else "\nWHERE " + supportedQuals.mkString(" AND ")

// Build ORDER BY
val orderByClause =
@@ -315,76 +318,81 @@ class DASSqliteTable(backend: DASSqliteBackend, defn: TableDefinition, maybePrim
str.replace("'", "''") // naive approach for single quotes

/**
* Maps an Operator enum to the corresponding SQL string. Some operators like ILIKE are not native to SQLite, so we
* provide a naive fallback or throw an exception.
* Maps an Operator enum to the corresponding SQL string. Some operators like ILIKE are not native to SQLite, so we do
* not handle them.
*/
private def operatorToSql(op: Operator): String = {
private def operatorToSql(op: Operator): Option[String] = {
op match {
case Operator.EQUALS => "="
case Operator.NOT_EQUALS => "<>"
case Operator.LESS_THAN => "<"
case Operator.LESS_THAN_OR_EQUAL => "<="
case Operator.GREATER_THAN => ">"
case Operator.GREATER_THAN_OR_EQUAL => ">="
case Operator.LIKE => "LIKE"
case Operator.NOT_LIKE => "NOT LIKE"

// SQLite does not have native ILIKE support. We can fallback to "LIKE" or fail.
case Operator.ILIKE => throw new IllegalArgumentException("SQLite does not support ILIKE.")
case Operator.NOT_ILIKE => throw new IllegalArgumentException("SQLite does not support NOT ILIKE.")

// Arithmetic operators might not be typical in a WHERE Qual
case Operator.PLUS => "+"
case Operator.MINUS => "-"
case Operator.TIMES => "*"
case Operator.DIV => "/"
case Operator.MOD => "%"
case Operator.AND => "AND"
case Operator.OR => "OR"

case _ => throw new IllegalArgumentException(s"Unsupported operator: $op")
case Operator.EQUALS => Some("=")
case Operator.NOT_EQUALS => Some("<>")
case Operator.LESS_THAN => Some("<")
case Operator.LESS_THAN_OR_EQUAL => Some("<=")
case Operator.GREATER_THAN => Some(">")
case Operator.GREATER_THAN_OR_EQUAL => Some(">=")
case Operator.LIKE => Some("LIKE")
case Operator.NOT_LIKE => Some("NOT LIKE")

// May be less typical in a WHERE clause
case Operator.PLUS => Some("+")
case Operator.MINUS => Some("-")
case Operator.TIMES => Some("*")
case Operator.DIV => Some("/")
case Operator.MOD => Some("%")
case Operator.AND => Some("AND")
case Operator.OR => Some("OR")

case _ => None
}
}
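The practical effect of the new Option return type, in caller terms (operatorToSql is private, so the lines below are expected behaviour written as comments, not runnable test code):

// operatorToSql(Operator.EQUALS)  // Some("=")  -> the qual can be rendered and pushed down
// operatorToSql(Operator.ILIKE)   // None       -> the qual is dropped here and evaluated by the caller
// quals.flatMap(qualToSql)        // keeps only the translatable quals (see tableEstimate/execute above)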

/**
* `IsAllQual` means "col op ALL these values", which we interpret as multiple AND clauses
*/
private def isAllQualToSql(colName: String, iq: IsAllQual): String = {
val opStr = operatorToSql(iq.getOperator)
val clauses = iq.getValuesList.asScala.map(v => s"$colName $opStr ${valueToSql(v)}")
// Combine with AND
clauses.mkString("(", " AND ", ")")
private def isAllQualToSql(colName: String, iq: IsAllQual): Option[String] = {
operatorToSql(iq.getOperator) match {
case Some(opStr) =>
val clauses = iq.getValuesList.asScala.map(v => s"$colName $opStr ${valueToSql(v)}")
// Combine with AND
Some(clauses.mkString("(", " AND ", ")"))
case None => None
}
}

/**
* `IsAnyQual` means "col op ANY of these values", which we interpret as multiple OR clauses
*/
private def isAnyQualToSql(colName: String, iq: IsAnyQual): String = {
val opStr = operatorToSql(iq.getOperator)
val clauses = iq.getValuesList.asScala.map(v => s"$colName $opStr ${valueToSql(v)}")
// Combine with OR
clauses.mkString("(", " OR ", ")")
private def isAnyQualToSql(colName: String, iq: IsAnyQual): Option[String] = {
operatorToSql(iq.getOperator) match {
case Some(opStr) =>
val clauses = iq.getValuesList.asScala.map(v => s"$colName $opStr ${valueToSql(v)}")
// Combine with OR
Some(clauses.mkString("(", " OR ", ")"))
case None => None
}
}
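A worked example makes the ALL/ANY combination concrete. This stand-alone sketch reproduces only the string assembly, assuming integer values render as plain literals (the real code goes through `valueToSql`, which is not shown in this diff):

object AllAnySketch {
  def main(args: Array[String]): Unit = {
    val colName = "\"AGE\""
    val clauses = Seq(23, 25).map(v => s"$colName = $v")
    println(clauses.mkString("(", " AND ", ")")) // ("AGE" = 23 AND "AGE" = 25)  <- IsAllQual
    println(clauses.mkString("(", " OR ", ")"))  // ("AGE" = 23 OR "AGE" = 25)   <- IsAnyQual
    // With an operator that does not translate (e.g. ILIKE), both helpers now return None instead.
  }
}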

/**
* `SimpleQual` is a single condition: "col op value"
*/
private def simpleQualToSql(colName: String, sq: SimpleQual): String = {
private def simpleQualToSql(colName: String, sq: SimpleQual): Option[String] = {
if (sq.getValue.hasNull && sq.getOperator == Operator.EQUALS) {
s"$colName IS NULL"
Some(s"$colName IS NULL")
} else if (sq.getValue.hasNull && sq.getOperator == Operator.NOT_EQUALS) {
s"$colName IS NOT NULL"
Some(s"$colName IS NOT NULL")
} else {
val opStr = operatorToSql(sq.getOperator)
val valStr = valueToSql(sq.getValue)
s"$colName $opStr $valStr"
operatorToSql(sq.getOperator) match {
case Some(opStr) =>
val valStr = valueToSql(sq.getValue)
Some(s"$colName $opStr $valStr")
case None => None
}
}
}
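The NULL special cases above exist because SQL comparisons with NULL are never true: `col = NULL` filters out every row, so the condition has to be rewritten as IS NULL / IS NOT NULL. A small stand-alone illustration against an in-memory SQLite database (assumes the sqlite-jdbc driver is on the classpath; this is a sketch, not project code):

import java.sql.DriverManager

object NullComparisonSketch {
  def main(args: Array[String]): Unit = {
    val conn = DriverManager.getConnection("jdbc:sqlite::memory:")
    try {
      val st = conn.createStatement()
      st.executeUpdate("CREATE TABLE t (name TEXT)")
      st.executeUpdate("INSERT INTO t (name) VALUES (NULL), ('Paul')")

      val eq = st.executeQuery("SELECT COUNT(*) AS c FROM t WHERE name = NULL")
      eq.next()
      println(eq.getInt("c")) // 0 -- '= NULL' never matches, not even the NULL row

      val isNull = st.executeQuery("SELECT COUNT(*) AS c FROM t WHERE name IS NULL")
      isNull.next()
      println(isNull.getInt("c")) // 1 -- IS NULL matches the NULL row
    } finally conn.close()
  }
}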

/**
* Converts any `Qual` to a SQL snippet. We handle `SimpleQual`, `IsAnyQual`, or `IsAllQual`.
*/
private def qualToSql(q: Qual): String = {
private def qualToSql(q: Qual): Option[String] = {
val colName = quoteIdentifier(q.getName)
if (q.hasSimpleQual) {
simpleQualToSql(colName, q.getSimpleQual)
99 changes: 88 additions & 11 deletions src/test/scala/com/rawlabs/das/sqlite/DASSqliteSimpleTest.scala
@@ -17,18 +17,27 @@ import scala.jdk.CollectionConverters._
import org.scalatest.BeforeAndAfterAll
import org.scalatest.funsuite.AnyFunSuite

import com.rawlabs.das.sdk.DASExecuteResult
import com.rawlabs.protocol.das.v1.query.{Operator, Qual, SimpleQual}
import com.rawlabs.protocol.das.v1.tables.{Column, Row}
import com.rawlabs.protocol.das.v1.types.{Value, ValueDouble, ValueInt, ValueString}
import com.typesafe.scalalogging.StrictLogging

class DASSqliteSimpleTest extends AnyFunSuite with BeforeAndAfterAll with StrictLogging {

test("read mydb file") {
val resourceUrl = getClass.getResource("/mydb")
val file = new java.io.File(resourceUrl.toURI)
val fullPath = file.getAbsolutePath
private var sdk: DASSqlite = _

override def beforeAll(): Unit = {
super.beforeAll()
sdk = buildSdk()
}

val sdk = new DASSqlite(Map("database" -> fullPath))
override def afterAll(): Unit = {
sdk.close()
super.afterAll()
}

test("read mydb file") {
val defs = sdk.tableDefinitions
assert(defs.nonEmpty, "tableDefinitions should not be empty.")
val names = defs.map(_.getTableId.getName)
@@ -39,11 +48,7 @@ class DASSqliteSimpleTest extends AnyFunSuite with BeforeAndAfterAll with Strict

val rs =
sdk.getTable("COMPANY").get.execute(Seq.empty, Seq("ID", "NAME", "AGE", "ADDRESS", "SALARY"), Seq.empty, None)
val buf = scala.collection.mutable.ListBuffer[Row]()
while (rs.hasNext) {
buf += rs.next()
}
rs.close()
val buf = collectAllRows(rs)

assert(
buf.toList == List(
@@ -53,8 +58,80 @@ class DASSqliteSimpleTest extends AnyFunSuite with BeforeAndAfterAll with Strict
buildMyDbRow(4, "Mark", 25, "Rich-Mond ", 65000.0),
buildMyDbRow(5, "David", 27, "Texas", 85000.0),
buildMyDbRow(6, "Kim", 22, "South-Hall", 45000.0)))
}

sdk.close()
test("filter mydb with operation that pushes down") {
val rs =
sdk
.getTable("COMPANY")
.get
.execute(
Seq(
Qual
.newBuilder()
.setName("ID")
.setSimpleQual(
SimpleQual
.newBuilder()
.setOperator(Operator.EQUALS)
.setValue(Value.newBuilder().setInt(ValueInt.newBuilder().setV(1)))
.build())
.build()),
Seq("ID", "NAME", "AGE", "ADDRESS", "SALARY"),
Seq.empty,
None)
val buf = collectAllRows(rs)
assert(buf.toList == List(buildMyDbRow(1, "Paul", 32, "California", 20000.0)))
}

test("filter mydb with operation that does NOT push down") {
val rs =
sdk
.getTable("COMPANY")
.get
.execute(
Seq(
Qual
.newBuilder()
.setName("NAME")
.setSimpleQual(
SimpleQual
.newBuilder()
.setOperator(Operator.ILIKE)
.setValue(Value.newBuilder().setString(ValueString.newBuilder().setV("PAUL")))
.build())
.build()),
Seq("ID", "NAME", "AGE", "ADDRESS", "SALARY"),
Seq.empty,
None)
val buf = collectAllRows(rs)

// Since we do NOT push down, we return the entire table
assert(
buf.toList == List(
buildMyDbRow(1, "Paul", 32, "California", 20000.0),
buildMyDbRow(2, "Allen", 25, "Texas", 15000.0),
buildMyDbRow(3, "Teddy", 23, "Norway", 20000.0),
buildMyDbRow(4, "Mark", 25, "Rich-Mond ", 65000.0),
buildMyDbRow(5, "David", 27, "Texas", 85000.0),
buildMyDbRow(6, "Kim", 22, "South-Hall", 45000.0)))
}
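When a qual is not pushed down, the table deliberately returns every row and the calling engine is expected to apply the remaining predicate itself. A rough, purely illustrative sketch of that engine-side step (rows modelled as plain Maps rather than the protocol Row type; the real DAS host handles this internally):

object ResidualFilterSketch {

  // Case-insensitive LIKE: translate the SQL pattern to a regex (% -> .*, _ -> .).
  def ilike(value: String, pattern: String): Boolean = {
    val regex = "(?i)" + pattern.flatMap {
      case '%' => ".*"
      case '_' => "."
      case c   => java.util.regex.Pattern.quote(c.toString)
    }
    value.matches(regex)
  }

  def main(args: Array[String]): Unit = {
    val rows = Seq(
      Map("ID" -> "1", "NAME" -> "Paul"),
      Map("ID" -> "2", "NAME" -> "Allen"))
    // The table returned all rows; the engine applies NAME ILIKE 'PAUL' afterwards.
    val filtered = rows.filter(r => ilike(r("NAME"), "PAUL"))
    println(filtered.map(_("NAME"))) // List(Paul)
  }
}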

private def buildSdk(): DASSqlite = {
val resourceUrl = getClass.getResource("/mydb")
val file = new java.io.File(resourceUrl.toURI)
val fullPath = file.getAbsolutePath

new DASSqlite(Map("database" -> fullPath))
}

private def collectAllRows(rs: DASExecuteResult): Seq[Row] = {
val buf = scala.collection.mutable.ListBuffer[Row]()
while (rs.hasNext) {
buf += rs.next()
}
rs.close()
buf.toList
}

private def buildMyDbRow(id: Int, name: String, age: Int, address: String, salary: Double): Row = {