From 46fc588d60bd108e609dc180dc08a7dfa7c5527e Mon Sep 17 00:00:00 2001 From: Pawel Mazurek <52866094+cyberhead-pl@users.noreply.github.com> Date: Mon, 20 Jan 2025 14:20:25 +0100 Subject: [PATCH 1/2] CASL-448 old -> new tests --- .../kotlin/naksha/psql/PgSession.kt | 28 +- .../kotlin/naksha/psql/executors/PgWriter.kt | 1 + .../kotlin/naksha/psql/PgQueryBuilderTest.kt | 265 ++++++++++++ .../kotlin/naksha/psql/TransactionsTest.kt | 55 ++- .../kotlin/old/NakshaSessionTest.kt | 150 ------- .../commonTest/kotlin/old/PsqlInstanceTest.kt | 31 -- .../src/commonTest/kotlin/old/README.md | 6 - .../kotlin/old/ReadQueryBuilderTest.kt | 406 ------------------ .../src/commonTest/kotlin/old/TestPsql.kt | 91 ---- .../kotlin/old/WriteFeaturesTest.kt | 81 ---- .../kotlin/naksha/psql/PsqlInstanceTest.kt | 32 ++ 11 files changed, 355 insertions(+), 791 deletions(-) create mode 100644 here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt delete mode 100644 here-naksha-lib-psql/src/commonTest/kotlin/old/NakshaSessionTest.kt delete mode 100644 here-naksha-lib-psql/src/commonTest/kotlin/old/PsqlInstanceTest.kt delete mode 100644 here-naksha-lib-psql/src/commonTest/kotlin/old/README.md delete mode 100644 here-naksha-lib-psql/src/commonTest/kotlin/old/ReadQueryBuilderTest.kt delete mode 100644 here-naksha-lib-psql/src/commonTest/kotlin/old/TestPsql.kt delete mode 100644 here-naksha-lib-psql/src/commonTest/kotlin/old/WriteFeaturesTest.kt create mode 100644 here-naksha-lib-psql/src/jvmTest/kotlin/naksha/psql/PsqlInstanceTest.kt diff --git a/here-naksha-lib-psql/src/commonMain/kotlin/naksha/psql/PgSession.kt b/here-naksha-lib-psql/src/commonMain/kotlin/naksha/psql/PgSession.kt index b75870534..8ea05b46f 100644 --- a/here-naksha-lib-psql/src/commonMain/kotlin/naksha/psql/PgSession.kt +++ b/here-naksha-lib-psql/src/commonMain/kotlin/naksha/psql/PgSession.kt @@ -310,26 +310,12 @@ open class PgSession( } } - private var isTransactionStored = false - private fun 
saveTransactionIntoDb(create: Boolean = false) { - // FIXME instead of create/update we can use upsert when ready - if (isTransactionStored && create) { - return - } else if (isTransactionStored) { - val updateTxReq = WriteRequest() - val updateTx = Write() - updateTxReq.add(updateTx) - updateTx.updateFeature(null, VIRT_TRANSACTIONS, transaction()) - // FIXME uncomment when counts and update ready -// PgWriter(this, updateTxReq).execute() - } else { - val writeTxReq = WriteRequest() - val writeTx = Write() - writeTxReq.add(writeTx) - writeTx.createFeature(null, VIRT_TRANSACTIONS, transaction()) - PgWriter(this, writeTxReq, InstantWriteExecutor(this)).execute() - isTransactionStored = true - } + private fun saveTransactionIntoDb() { + val writeTxReq = WriteRequest() + val writeTx = Write() + writeTxReq.add(writeTx) + writeTx.upsertFeature(null, VIRT_TRANSACTIONS, transaction()) + PgWriter(this, writeTxReq, InstantWriteExecutor(this)).execute() } /** @@ -363,7 +349,7 @@ open class PgSession( val tx = transaction if (tx != null) { try { - saveTransactionIntoDb(true) + saveTransactionIntoDb() } catch (e: Throwable) { throw NakshaException(EXCEPTION, "Failed to save transaction", cause = e) } diff --git a/here-naksha-lib-psql/src/commonMain/kotlin/naksha/psql/executors/PgWriter.kt b/here-naksha-lib-psql/src/commonMain/kotlin/naksha/psql/executors/PgWriter.kt index f2a930be0..4b32aed95 100644 --- a/here-naksha-lib-psql/src/commonMain/kotlin/naksha/psql/executors/PgWriter.kt +++ b/here-naksha-lib-psql/src/commonMain/kotlin/naksha/psql/executors/PgWriter.kt @@ -278,6 +278,7 @@ class PgWriter( // If everything was done perfectly, fine. 
val tupleNumberByteArray = TupleNumberByteArray(storage, tupleNumbers.toByteArray()) + session.transaction().featuresModified += tupleNumbers.size return SuccessResponse( PgResultSet( storage, diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt b/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt new file mode 100644 index 000000000..9b5540a5f --- /dev/null +++ b/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt @@ -0,0 +1,265 @@ +package naksha.psql + +import naksha.geo.PointCoord +import naksha.geo.SpGeometry +import naksha.model.Naksha.NakshaCompanion.VIRT_COLLECTIONS +import naksha.model.request.ReadCollections +import naksha.model.request.ReadFeatures +import naksha.model.request.RequestQuery +import naksha.model.request.query.* +import naksha.model.request.query.StringOp.QStringOpCompanion.EQUALS +import naksha.model.request.query.TupleColumn.TupleColumn_C.ID +import naksha.model.request.query.TupleColumn.TupleColumn_C.UID +import naksha.psql.base.PgTestBase +import naksha.psql.executors.query.PgQueryBuilder +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertTrue + +@Suppress("UNCHECKED_CAST") +class PgQueryBuilderTest : PgTestBase() { + + private val session = storage.newReadSession() as PgSession + + @Test + fun testReadNoConditions() { + // given + val req = ReadFeatures().apply { collectionIds += "foo" } + + // when + + val query = PgQueryBuilder(session, req).build() + + // then + assertEquals(0, query.argValues.size) + assertEquals( + """ + SELECT gzip(bytea_agg(tuple_number)) AS rs FROM (SELECT tuple_number FROM ( + (SELECT tuple_number, id FROM foo) + ) ORDER BY id, tuple_number) LIMIT 1000000; + """.trimIndent(), query.sql.trimIndent() + ) + } + + @Test + fun testReadMultipleCollections() { + // given + val req = ReadFeatures().apply { + collectionIds += "foo1" + collectionIds += "foo2" + } + + // when + val query = 
PgQueryBuilder(session, req).build() + + // then + assertEquals(0, query.argValues.size) + assertEquals( + """ + SELECT gzip(bytea_agg(tuple_number)) AS rs FROM (SELECT tuple_number FROM ( + (SELECT tuple_number, id FROM foo1) UNION ALL + (SELECT tuple_number, id FROM foo2) + ) ORDER BY id, tuple_number) LIMIT 1000000; + """.trimIndent(), query.sql.trimIndent() + ) + } + + @Test + fun testReadById() { + // given + val req = ReadFeatures().apply { + collectionIds += "foo" + featureIds += "f1" + } + + // when + val query = PgQueryBuilder(session, req).build() + + // then + assertEquals(1, query.argValues.size) + assertEquals("f1", (query.argValues[0] as Array)[0]) + assertEquals( + """(SELECT tuple_number, id FROM foo WHERE id = ANY($1))""", + removeLimitWrapper(query.sql) + ) + } + + @Test + fun testReadWithOr() { + // given + val req = ReadFeatures().apply { + collectionIds += "foo" + featureIds += "f1" + featureIds += "f2" + } + + // when + val query = PgQueryBuilder(session, req).build() + + // then + assertEquals(1, query.argValues.size) + assertTrue(arrayOf("f1", "f2") contentEquals (query.argValues[0] as Array)) + assertEquals( + """(SELECT tuple_number, id FROM foo WHERE id = ANY($1))""", + removeLimitWrapper(query.sql) + ) + } + + // TODO FIXME uncomment me once property read is ready. 
+ // @Test + fun testReadWithAnd() { + // given + val req = ReadFeatures().apply { + collectionIds += "foo" + query = RequestQuery().apply { + properties = POr( + PQuery(Property(ID), EQUALS, "f1"), + PAnd( + PQuery(Property(ID), EQUALS, "f2"), + PQuery(Property(UID), DoubleOp.LT, 2.0) + ) + ) + } + } + + // when + val query = PgQueryBuilder(session, req).build() + + // then + assertEquals(0, query.argValues.size) + assertEquals( + """((SELECT tuple_number, id FROM foo WHERE (id=$1 OR (id=$2 AND uid<$3)))""", + removeLimitWrapper(query.sql) + ) + } + + @Test + fun testReadHistory() { + // given + val req = ReadFeatures().apply { + collectionIds += "foo" + queryHistory = true + } + + // when + val query = PgQueryBuilder(session, req).build() + + + // then + assertEquals( + """ + (SELECT tuple_number, id FROM foo) UNION ALL + (SELECT tuple_number, id FROM "foo${'$'}hst") + """.trimIndent(), removeLimitWrapper(query.sql) + ) + } + + @Test + fun testReadWithHistoryAndDel() { + // given + val req = ReadFeatures().apply { + collectionIds += "foo" + featureIds += "f1" + queryHistory = true + queryDeleted = true + } + + // when + val query = PgQueryBuilder(session, req).build() + + + // then + assertEquals( + """ + (SELECT tuple_number, id FROM foo WHERE id = ANY(${'$'}1)) UNION ALL + (SELECT tuple_number, id FROM "foo${'$'}del" WHERE id = ANY($1)) UNION ALL + (SELECT tuple_number, id FROM "foo${'$'}hst" WHERE id = ANY($1)) + """.trimIndent(), removeLimitWrapper(query.sql) + ) + } + + + @Test + fun testReadBySpatial() { + // given + val req = ReadFeatures().apply { + collectionIds += "foo" + query = RequestQuery().apply { + spatial = SpIntersects(SpGeometry(PointCoord(1.0, 1.0, 1.0))) + } + } + + // when + val query = PgQueryBuilder(session, req).build() + + // then + assertEquals( + """(SELECT tuple_number, id FROM foo WHERE (ST_Intersects(naksha_geometry(geo, flags), naksha_geometry($1, 0))))""", + removeLimitWrapper(query.sql) + ) + } + + @Test + fun 
testReadBySpatialWithBuffer() { + // given + val geometryTransformation = SpBuffer(22.2, geography = true) + val req = ReadFeatures().apply { + collectionIds += "foo" + query = RequestQuery().apply { + spatial = SpIntersects(SpGeometry(PointCoord(1.0, 1.0, 1.0)), geometryTransformation) + } + } + + // when + val query = PgQueryBuilder(session, req).build() + + // then + assertEquals( + """(SELECT tuple_number, id FROM foo WHERE (ST_Intersects(naksha_geometry(geo, flags), ST_Buffer(naksha_geometry($1, 0)::geography, $2))))""", + removeLimitWrapper(query.sql) + ) + } + + @Test + fun testReadAllCollections() { + // given + val req = ReadCollections() + + // when + val query = PgQueryBuilder(session, req).build() + + // then + assertEquals(0, query.argValues.size) + assertEquals( + """(SELECT tuple_number, id FROM "$VIRT_COLLECTIONS")""", + removeLimitWrapper(query.sql) + ) + } + + @Test + fun testTagsQuery() { + // given + val req = ReadFeatures().apply { + collectionIds += "foo" + query = RequestQuery().apply { + tags = TagExists("stg") + } + } + + // when + val query = PgQueryBuilder(session, req).build() + + // then + assertEquals(1, query.argValues.size) + assertEquals( + """(SELECT tuple_number, id FROM foo WHERE (naksha_tags(tags, flags) ?? 
$1))""", + removeLimitWrapper(query.sql) + ) + } + + + private fun removeLimitWrapper(sql: String) = + sql.replace("SELECT gzip(bytea_agg(tuple_number)) AS rs FROM (SELECT tuple_number FROM (\n", "") + .replace("\n) ORDER BY id, tuple_number) LIMIT 1000000;", "") + .trimIndent() +} diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/TransactionsTest.kt b/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/TransactionsTest.kt index 5e96ff523..4733e4771 100644 --- a/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/TransactionsTest.kt +++ b/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/TransactionsTest.kt @@ -1,21 +1,19 @@ package naksha.psql -import kotlinx.datetime.Clock.System.now -import kotlinx.datetime.TimeZone.Companion.currentSystemDefault -import kotlinx.datetime.toLocalDateTime import naksha.model.Naksha import naksha.model.NakshaCache import naksha.model.objects.NakshaCollection import naksha.model.objects.NakshaFeature +import naksha.model.objects.Transaction import naksha.model.request.ReadFeatures import naksha.model.request.SuccessResponse import naksha.model.request.Write import naksha.model.request.WriteRequest import naksha.psql.base.PgTestBase -import naksha.psql.util.ProxyFeatureGenerator.generateRandomFeature -import kotlin.test.AfterTest import kotlin.test.Test import kotlin.test.assertEquals +import kotlin.test.assertIs +import kotlin.test.assertTrue class TransactionsTest : PgTestBase(NakshaCollection("transaction_test")) { @@ -43,4 +41,51 @@ class TransactionsTest : PgTestBase(NakshaCollection("transaction_test")) { // then assertEquals(savedFeatureVersion, readResponse.tuples[0]?.tuple?.meta?.version) } + + @Test + fun updateTransactionInfoOnMultipleWrites() { + // given + val feature1 = NakshaFeature("f2") + val writeRequest1 = WriteRequest().apply { add(Write().createFeature(map = null, collection!!.id, feature1)) } + + val feature2 = NakshaFeature("f3") + val writeRequest2 = WriteRequest().apply { 
add(Write().createFeature(map = null, collection!!.id, feature2)) } + + val writeSession = env.storage.newWriteSession(null) + + // when + assertIs(writeSession.execute(writeRequest1)) + + // then + assertEquals(1, writeSession.transaction().featuresModified) + + // when + val value = writeSession.execute(writeRequest2) + assertIs(value) + + // then + assertEquals(2, writeSession.transaction().featuresModified) + } + + @Test + fun shouldBeAbleToTagTransaction() { + // given + val feature = NakshaFeature("f40") + val writeRequest = WriteRequest().apply { add(Write().createFeature(map = null, collection!!.id, feature)) } + + val writeSession = env.storage.newWriteSession(null) + + // when + writeSession.transaction().properties.xyz.addTag("sth", false) + assertIs(writeSession.execute(writeRequest)) + val transactionId = writeSession.transaction().id + writeSession.commit() + + // then + val readRequest = ReadFeatures(Naksha.VIRT_TRANSACTIONS).apply { + featureIds += transactionId + } + val readResponse = storage.newReadSession().execute(readRequest) as SuccessResponse + assertTrue(readResponse.features[0]!!.properties.xyz.tags!!.contains("sth")) + } } \ No newline at end of file diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/old/NakshaSessionTest.kt b/here-naksha-lib-psql/src/commonTest/kotlin/old/NakshaSessionTest.kt deleted file mode 100644 index 27aa1123d..000000000 --- a/here-naksha-lib-psql/src/commonTest/kotlin/old/NakshaSessionTest.kt +++ /dev/null @@ -1,150 +0,0 @@ -package old - -//import naksha.jbon.XyzBuilder -//import naksha.model.Flags.DEFAULT_FLAGS -//import naksha.model.NakshaCollectionProxy -//import naksha.model.NakshaCollectionProxy.Companion.PARTITION_COUNT_NONE -//import naksha.model.request.op.InsertRow -//import naksha.model.request.op.WriteFeature -//import naksha.model.request.WriteRequest -//import naksha.model.request.op.WriteRow -//import naksha.model.request.ErrorResponse -//import naksha.model.Row -//import 
naksha.model.request.SuccessResponse -//import naksha.psql.* -//import naksha.psql.NKC_TABLE -//import naksha.psql.PsqlTestStorage -//import org.junit.jupiter.api.Assertions.* -//import org.junit.jupiter.api.Test -// -//class NakshaSessionTest : TestBasics() { -// -// private val collectionId = "foo_common" -// private val writeSession = storage.newWriteSession(PsqlTestStorage.context) -// -// @Test -// fun testGetBaseCollectionId() { -// // expect -// assertEquals("foo", writeSession.getBaseCollectionId("foo\$p7")) -// } -// -// @Test -// fun testEnsureHistoryPartition() { -// // given -// createCollection(session = writeSession, collectionId = collectionId, partitionCount = 8, disableHistory = false) -// -// // then -// val collectionConfig = writeSession.getCollectionConfig(collectionId) -// val isHistoryDisabled: Boolean = collectionConfig.disableHistory -// assertFalse(isHistoryDisabled) -// val partitionCount: Int = collectionConfig.partitions -// assertEquals(8, partitionCount) -// val expectedPartitionName = "${collectionId}\$hst_${writeSession.txn().year()}" -// assertTrue(doesTableExist(writeSession, expectedPartitionName)) -// } -// -// @Test -// fun transactionShouldBeUpdatedWhenExecutingWriteFeaturesMultipleTimes() { -// // given -// createCollection(session = writeSession, collectionId = collectionId, partitionCount = 8, disableHistory = false) -// val otherCollection = "collection2" -// createCollection(session = writeSession, collectionId = otherCollection, partitionCount = 8, disableHistory = false) -// writeSession.clear() -// -// val op1 = InsertRow(collectionId, Row(storage, DEFAULT_FLAGS, "feature1")) -// val op2 = InsertRow(collectionId, Row(storage, DEFAULT_FLAGS, "feature2")) -// val op3 = InsertRow(otherCollection, Row(storage, DEFAULT_FLAGS, "feature2")) -// -// // when -// writeSession.write(WriteRequest(ops = arrayOf(op1))) -// -// // then -// assertEquals(1, writeSession.transaction.featuresModified) -// assertEquals(1, 
writeSession.transaction.collections[collectionId]?.inserted) -// -// // when executed again in same session -// writeSession.write(WriteRequest(ops = arrayOf(op2, op3))) -// -// // then -// assertEquals(3, writeSession.transaction.featuresModified) -// assertEquals(2, writeSession.transaction.collections[collectionId]?.inserted) -// assertEquals(1, writeSession.transaction.collections[otherCollection]?.inserted) -// } -// -// @Test -// fun writeFeaturesShouldNotAllowMultipleOperationsOnSameFeature() { -// // given -// val collectionId = "foo" -// writeSession.collectionConfiguration.put(collectionId, NakshaCollectionProxy(collectionId, partitions = 1, autoPurge = false, disableHistory = false)) -// -// val op1 = InsertRow(collectionId, Row(storage, DEFAULT_FLAGS, "someId")) -// val op2 = InsertRow(collectionId, Row(storage, DEFAULT_FLAGS, "someId")) -// -// // when -// val result = writeSession.write(WriteRequest(ops = arrayOf(op1, op2))) -// -// // then -// assertInstanceOf(ErrorResponse::class.java, result) -// assertEquals("Cannot perform multiple operations on single feature in one transaction", (result as ErrorResponse).reason.message) -// } -// -// @Test -// fun canTagTransaction() { -// // given -// createCollection(session = writeSession, collectionId = collectionId, partitionCount = 8, disableHistory = false) -// writeSession.clear() -// writeSession.commit() -// -// val op1 = InsertRow(collectionId, Row(storage, DEFAULT_FLAGS, "feature1")) -// -// // when -// val result1 = writeSession.write(WriteRequest(ops = arrayOf(op1))) -// // then -// -// assertInstanceOf(SuccessResponse::class.java, result1) { (result1 as ErrorResponse).reason.message} -// assertEquals(1, writeSession.transaction.featuresModified) -// assertEquals(1, writeSession.transaction.collections[collectionId]?.inserted) -// assertEquals(0, writeSession.transaction.collections[collectionId]?.updated) -// assertEquals(1, (result1 as SuccessResponse).rows[0].row?.meta?.version) -// -// // 
when -// val tagBuilder = XyzBuilder() -// -// tagBuilder.startTags() -// tagBuilder.writeTag("tag1") -// val tagsBytes = tagBuilder.buildTags() -// -// val op2 = WriteRow(PgStatic.SC_TRANSACTIONS, Row(storage, DEFAULT_FLAGS, writeSession.transaction.id, tags = tagsBytes)) -// -// val result2 = writeSession.write(WriteRequest(ops = arrayOf(op2))) -// -// // then -// assertInstanceOf(SuccessResponse::class.java, result2) { (result2 as ErrorResponse).reason.message} -// assertEquals(2, writeSession.transaction.featuresModified) -// assertEquals(1, writeSession.transaction.collections[collectionId]?.inserted) -// assertEquals(1, writeSession.transaction.collections[PgStatic.SC_TRANSACTIONS]?.updated) -// val rows = (result2 as SuccessResponse).rows -// assertEquals(4, rows[0].row?.meta?.version) -// } -// -// private fun createCollection(session: NakshaSession, collectionId: String, partitionCount: Int = PARTITION_COUNT_NONE, disableHistory: Boolean = true) { -// val collection = NakshaCollectionProxy( -// id = collectionId, -// partitions = partitionCount, -// disableHistory = disableHistory, -// autoPurge = false -// ) -// -// val op = WriteFeature(NKC_TABLE, collection) -// -// session.write(WriteRequest(ops = arrayOf(op))) -// session.commit() -// } -// -// private fun doesTableExist(writeSession: NakshaSession, tableName: String): Boolean { -// val result = writeSession.usePgConnection().asRows(writeSession.usePgConnection().execute("""SELECT EXISTS ( -// SELECT FROM pg_tables WHERE tablename = '$tableName' -// );""")) -// return result!![0]["exists"] as Boolean -// } -//} diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/old/PsqlInstanceTest.kt b/here-naksha-lib-psql/src/commonTest/kotlin/old/PsqlInstanceTest.kt deleted file mode 100644 index 515e0d8b6..000000000 --- a/here-naksha-lib-psql/src/commonTest/kotlin/old/PsqlInstanceTest.kt +++ /dev/null @@ -1,31 +0,0 @@ -package old -// -//import naksha.psql.PsqlInstance -//import kotlin.test.Test -//import 
kotlin.test.* -// -//class PsqlInstanceTest { -// @Test -// fun testParsing() { -// val instance = PsqlInstance.get("jdbc:postgresql://localhost/unimap?user=postgres&password=secret") -// assertNotNull(instance) -// assertEquals("localhost", instance.host) -// assertEquals(5432, instance.port) -// assertEquals("unimap", instance.database) -// assertEquals("postgres", instance.user) -// assertEquals("secret", instance.password) -// assertFalse(instance.readOnly) -// } -// -// @Test -// fun testDeclaring() { -// val instance = PsqlInstance.get("localhost", 5432, "unimap", "postgres", "secret") -// assertNotNull(instance) -// assertEquals("localhost", instance.host) -// assertEquals(5432, instance.port) -// assertEquals("unimap", instance.database) -// assertEquals("postgres", instance.user) -// assertEquals("secret", instance.password) -// assertFalse(instance.readOnly) -// } -//} \ No newline at end of file diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/old/README.md b/here-naksha-lib-psql/src/commonTest/kotlin/old/README.md deleted file mode 100644 index 5926340cf..000000000 --- a/here-naksha-lib-psql/src/commonTest/kotlin/old/README.md +++ /dev/null @@ -1,6 +0,0 @@ -Tests in this directory come from previous version of Naksha design. -The goal is to remove this directory entirely. 
-We should delete an old test **only** if one of the following holds true: -- given test does not make sense in current design -- given test was moved to "working test set" and fixed -- other test from "working test set" already covers the same logic \ No newline at end of file diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/old/ReadQueryBuilderTest.kt b/here-naksha-lib-psql/src/commonTest/kotlin/old/ReadQueryBuilderTest.kt deleted file mode 100644 index 32bc4c87f..000000000 --- a/here-naksha-lib-psql/src/commonTest/kotlin/old/ReadQueryBuilderTest.kt +++ /dev/null @@ -1,406 +0,0 @@ -//package naksha.psql -// -//import naksha.geo.GeometryProxy -//import naksha.model.Naksha.NakshaUtilCompanion.VIRT_COLLECTIONS -//import naksha.model.request.ReadCollections -//import naksha.model.request.ReadFeatures -//import naksha.model.request.ReadFeatures.ReadFeaturesCompanion.readIdsOnly -//import naksha.model.request.RowOptions -//import naksha.model.request.query.* -//import naksha.model.request.query.Property.PropRefCompanion.id -//import naksha.model.request.query.Property.PropRefCompanion.uid -//import naksha.model.request.query.DoubleOp.QNumericOpCompanion.LT -//import naksha.model.request.query.AnyOp.QOpCompanion.IS_NOT_NULL -//import naksha.model.request.query.StringOp.QStringOpCompanion.EQUALS -//import naksha.psql.read.ReadQueryBuilder -//import kotlin.test.Test -//import kotlin.test.assertEquals -//import kotlin.test.assertTrue -// -//@Suppress("UNCHECKED_CAST") -//class ReadQueryBuilderTest { -// -// private val builder = ReadQueryBuilder() -// -// //@Test -// fun testReadNoConditions() { -// // given -// val req = ReadFeatures().addCollectionId("foo") -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(0, params.size) -// assertEquals( -// """ -// SELECT * FROM ( -// (SELECT id, type, geo_ref, flags, txn_next, txn, uid, ptxn, puid, version, created_at, updated_at, author_ts, author, app_id, geo_grid, tags, geo, feature 
FROM "foo") -// ) LIMIT 100000 -// """.trimIndent(), sql.trimIndent() -// ) -// } -// -// //@Test -// fun testReadNoMeta() { -// // given -// val req = ReadFeatures().addCollectionId("foo").withRowOptions(RowOptions(meta = false)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(0, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags, tags, geo, feature FROM "foo")""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadNoMetaNoTags() { -// // given -// val req = ReadFeatures().addCollectionId("foo").withRowOptions(RowOptions(tags = false)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(0, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags, geo, feature FROM "foo")""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadNoMetaNoTagsNoFeature() { -// // given -// val req = ReadFeatures().addCollectionId("foo").withRowOptions(RowOptions(meta = false, tags = false, feature = false)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(0, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags, geo FROM "foo")""", -// removeLimitWrapper(sql) -// ) -// } -// -// -// //@Test -// fun testReadNoMetaNoTagsNoFeatureNoGeometry() { -// // given -// val req = ReadFeatures() -// .addCollectionId("foo") -// .withRowOptions(RowOptions(meta = false, tags = false, geometry = false)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(0, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo")""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadMultipleCollections() { -// // given -// val req = ReadFeatures() -// .addCollectionId("foo1") -// .addCollectionId("foo2") -// .withRowOptions(RowOptions(meta = false, tags = false, feature = false, geometry = false)) -// -// // when -// 
val (sql, params) = builder.build(req) -// -// // then -// assertEquals(0, params.size) -// assertEquals( -// """ -// SELECT * FROM ( -// (SELECT id, type, geo_ref, flags FROM "foo1") -// UNION ALL -// (SELECT id, type, geo_ref, flags FROM "foo2") -// ) LIMIT 100000 -// """.trimIndent(), sql.trimIndent() -// ) -// } -// -// //@Test -// fun testReadById() { -// // given -// val req = readIdsOnly("foo").addId("f1") -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(1, params.size) -// assertEquals("f1", params[0]) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE id=$1)""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadWithOr() { -// // given -// val req = readIdsOnly("foo").addIds("f1", "f2") -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(2, params.size) -// assertTrue(arrayOf("f1", "f2") contentEquals params.toTypedArray()) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE (id=$1 OR id=$2))""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadWithAnd() { -// // given -// val req = readIdsOnly("foo") -// .withQueryProperties(LOr(PQuery(id, EQUALS, "f1"), LAnd(PQuery(id, EQUALS, "f2"), PQuery(uid, LT, "f1")))) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(3, params.size) -// assertEquals(1111, params[2]) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE (id=$1 OR (id=$2 AND uid<$3)))""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadWithHistory() { -// // given -// val req = ReadFeatures() -// .addCollectionId("foo") -// .withQueryHistory() -// .withRowOptions(RowOptions(tags = false, feature = false, geometry = false)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals( -// """ -// (SELECT id, type, geo_ref, flags FROM "foo") -// UNION ALL -// 
(SELECT id, type, geo_ref, flags FROM "foo${'$'}hst") -// """.trimIndent(), removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadWithHistoryAndDel() { -// // given -// val req = ReadFeatures() -// .addCollectionId("foo") -// .withQueryHistory() -// .withQueryDeleted() -// .withQueryProperties(PQuery(id, EQUALS, "X")) -// .withRowOptions(RowOptions(meta = false, tags = false, feature = false, geometry = false)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(3, params.size) -// assertEquals( -// """ -// (SELECT id, type, geo_ref, flags FROM "foo" WHERE id=$1) -// UNION ALL -// (SELECT id, type, geo_ref, flags FROM "foo${'$'}del" WHERE id=$2) -// UNION ALL -// (SELECT id, type, geo_ref, flags FROM "foo${'$'}hst" WHERE id=$3) -// """.trimIndent(), removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadByIdIsNotNull() { -// // given -// val req = readIdsOnly("foo").withQueryProperties(PQuery(id, IS_NOT_NULL)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(0, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE id is not null)""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadBySpatial() { -// // given -// val req = readIdsOnly("foo") -// .withQuerySpatial(SpIntersects(GeometryProxy())) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(1, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE ST_Intersects(naksha_geometry(flags,geo), ST_Force3D(naksha_geometry_in_type(3::int2,$1))))""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadBySpatialWithBuffer() { -// // given -// val geometryTransformation = SpBuffer(22.2, geography = true) -// val req = readIdsOnly("foo") -// .withQuerySpatial(SpIntersects(GeometryProxy(), geometryTransformation)) -// -// // when -// val (sql, params) = 
builder.build(req) -// -// // then -// assertEquals(1, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE ST_Intersects(naksha_geometry(flags,geo), ST_Buffer(ST_Force3D(naksha_geometry_in_type(3::int2,$1))::geography ,22.2,E'') ))""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadAllCollections() { -// // given -// val req = ReadCollections() -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(0, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags, txn_next, txn, uid, ptxn, puid, version, created_at, updated_at, author_ts, author, app_id, geo_grid, tags, geo, feature FROM "$VIRT_COLLECTIONS")""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadCollectionsById() { -// // given -// val req = ReadCollections() -// .addId("foo") -// .addId("bar") -// .addId("baz") -// .withRowOptions(RowOptions(meta = false, tags = false, feature = false, geometry = false)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(1, params.size) -// assertTrue(arrayOf("foo", "bar", "baz") contentEquals params[0] as Array) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "$VIRT_COLLECTIONS" WHERE id in $1)""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testReadDeletedCollections() { -// // given -// val req = ReadCollections() -// .addId("foo") -// .withQueryDeleted() -// .withRowOptions(RowOptions(meta = false, tags = false, feature = false, geometry = false)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(2, params.size) -// assertEquals( -// """ -// (SELECT id, type, geo_ref, flags FROM "$VIRT_COLLECTIONS" WHERE id in $1) -// UNION ALL -// (SELECT id, type, geo_ref, flags FROM "$VIRT_COLLECTIONS${'$'}del" WHERE id in $2) -// """.trimIndent().trimMargin(), -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun 
testUuidQuery() { -// // given -// val uuid = "test_storage:building_delta:feature1:2024:01:23:1:0" -// -// val req = readIdsOnly("foo") -// .withQueryProperties(PQuery(Property.uuid, EQUALS, uuid)) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(2, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE (txn=$1 AND uid=$2))""", -// removeLimitWrapper(sql) -// ) -// } -// -// //@Test -// fun testTagsQuery() { -// // given -// -// val req = readIdsOnly("foo") -// .withQueryTags(TagExists("tag1")) -// -// // when -// val (sql, params) = builder.build(req) -// -// // then -// assertEquals(1, params.size) -// assertEquals( -// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE tags_to_jsonb(tags) ? $1)""", -// removeLimitWrapper(sql) -// ) -// } -// -// @Test -// fun testAnyQuery() { -// // TODO: Repair me!!!! -// // given -// val txns = arrayOf("11", "22") -//// val req = readIdsBy("foo", POp.POpCompanion.any(TXN, txns)) -// -// // when -//// val (sql, params) = builder.build(req) -// -// // then -//// assertEquals(1, params.size) -//// assertEquals(txns, params[0] as Array) -//// assertEquals( -//// """(SELECT id, type, geo_ref, flags FROM "foo" WHERE txn=ANY($1))""", -//// removeLimitWrapper(sql) -//// ) -// -// } -// -// private fun removeLimitWrapper(sql: String) = -// sql.replace("SELECT * FROM (\n", "") -// .replace(") LIMIT 100000", "") -// .trimIndent() -//} diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/old/TestPsql.kt b/here-naksha-lib-psql/src/commonTest/kotlin/old/TestPsql.kt deleted file mode 100644 index c7dfdea2e..000000000 --- a/here-naksha-lib-psql/src/commonTest/kotlin/old/TestPsql.kt +++ /dev/null @@ -1,91 +0,0 @@ -package old - -import naksha.base.* -import naksha.base.PlatformUtil.PlatformUtilCompanion.randomString -import naksha.model.objects.NakshaCollection -import naksha.model.request.* -import naksha.psql.PgUtil -import naksha.psql.base.PgTestBase -import 
kotlin.test.* - -/** - * We add all tests into a single file, because ordering of tests is not supported, and we do not want to create a new schema and initialize the database for every single test. I understand that in theory, each test should be independent, but if we do this, tests will become so slow, that it takes hours to run them all eventually, and this is worse than the alternative of having tests being strongly dependent on each other. Specifically, this makes writing more tests faster, because we can reuse test code and create multiple things in a row, testing multiple things at ones, and not need to always set up everything gain. As said, it is true that this way of testing is suboptimal from testing perspective, but it is a lot faster in writing the tests, and quicker at runtime, and it is more important to have fast tests, and spend only a minimal amount of time creating them, than to have the perfect tests. This is not a nuclear plant! - */ -class TestPsql: PgTestBase() { - - private fun isLockReleased(collectionId: String): Boolean { - val lock = PgUtil.lockId(collectionId).toLong() - useConnection().execute( - "select count(*) as count from pg_locks where locktype='advisory' and ((classid::bigint << 32) | objid::bigint) = $lock;" - ).fetch().use { - return (it.column("count") as Int64).toInt() == 0 - } - } - - - // @Test TODO: fix dropping - fun create_collection_and_drop_it() { - val col = NakshaCollection(randomString()) - val writeRequest = WriteRequest() - writeRequest.writes += Write().createCollection(null, col) - var session = env.storage.newWriteSession() - session.use { - val response = session.execute(writeRequest) - assertIs(response) - session.commit() - } - - val readRequest = ReadCollections() - readRequest.collectionIds += col.id - val readSession = env.storage.newReadSession() - readSession.use { - val response = readSession.execute(readRequest) - assertIs(response) - assertEquals(1, response.resultSize()) - assertEquals(1, 
response.features.size) - val feature = response.features[0] - assertNotNull(feature) - assertEquals(col.id, feature.id) - } - - val dropRequest = WriteRequest() - dropRequest.writes += Write().deleteCollectionById(null, col.id) - session = env.storage.newWriteSession() - session.use { - val response = session.execute(dropRequest) - assertIs(response) - } - } - -// private fun create_collection(id: String, partitions: Int) { -// val nakCollection = NakshaCollection(id, partitions, autoPurge = false, disableHistory = false) -// val collectionWriteReq = WriteRequest() -// collectionWriteReq.add(UpsertFeature(NKC_TABLE, nakCollection)) -// try { -// val response: Response = env.pgSession.write(collectionWriteReq) -// assertIs(response, response.toString()) -// val successResponse: SuccessResponse = response -// val responseRow: ResultRow = successResponse.resultSet.rows()[0] -// val row: Row = responseRow.row!! -// assertEquals(id, row.id) -// assertNotNull(row.meta?.rowId()) -// assertSame(CREATED, responseRow.op) -// val collection = responseRow.getFeature()?.proxy(NakshaCollection::class)!! 
-// assertNotNull(collection) -// assertEquals(id, row.id) -// assertFalse(collection.disableHistory) -// assertEquals(partitions > 0, collection.hasPartitions()) -// assertNotNull(collection.properties) -// assertSame(ACTION_CREATE, Flags(row.meta!!.flags).action()) -// } finally { -// env.pgSession.commit() -// } -// } - - - - @Test - fun read_features_by_geo_intersection(){ - - } -} \ No newline at end of file diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/old/WriteFeaturesTest.kt b/here-naksha-lib-psql/src/commonTest/kotlin/old/WriteFeaturesTest.kt deleted file mode 100644 index f56752f13..000000000 --- a/here-naksha-lib-psql/src/commonTest/kotlin/old/WriteFeaturesTest.kt +++ /dev/null @@ -1,81 +0,0 @@ -package old -// -//import naksha.jbon.XYZ_OP_CREATE -//import naksha.base.Int64 -//import naksha.model.* -//import naksha.model.request.op.InsertRow -//import naksha.model.request.WriteRequest -//import naksha.model.Row -//import naksha.model.request.SuccessResponse -//import naksha.psql.write.RowUpdater -//import naksha.psql.write.WriteRequestExecutor -//import org.junit.jupiter.api.Assertions.assertInstanceOf -//import org.junit.jupiter.api.Test -//import org.mockito.ArgumentMatchers.anyString -//import org.mockito.kotlin.any -//import org.mockito.kotlin.doReturn -//import org.mockito.kotlin.mock -//import kotlin.test.assertEquals -//import kotlin.test.assertNotNull -// -//class WriteFeaturesTest { -// -// private val fooCollectionConfig = NakshaCollectionProxy( -// id = "foo", -// partitions = 1, -// autoPurge = false, -// disableHistory = true -// ) -// -// @Test -// fun testWrite() { -// // given -// val collectionId = "foo" -// val row = Row( -// mock(), -// guid = null, -// flags = Flags.DEFAULT_FLAGS, -// id = "foo1", -// feature = "dummyFeature".encodeToByteArray() -// ) -// -// val createFeatureRequest = WriteRequest(ops = arrayOf(InsertRow(collectionId = collectionId, row = row))) -// -// val storageMock = mock { -// on { id() } doReturn 
"storageId" -// } -// -// val sqlMock = mock { -// on { PgUtil.quoteIdent(anyString()) } doReturn "" -// on { prepare(any(), any()) } doReturn mock() -// on { executeBatch(any(), any()) } doReturn intArrayOf(1) -// } -// -// val sessionMock = mock { -// on { getBaseCollectionId(collectionId) } doReturn collectionId -// on { getCollectionConfig(collectionId) } doReturn fooCollectionConfig -// on { storage } doReturn storageMock -// on { txn() } doReturn Txn(Int64(1)) -// on { txnTs() } doReturn Int64(2) -// on { rowUpdater } doReturn RowUpdater(it) -// } -// -// val executor = WriteRequestExecutor(sessionMock, false) -// -// // when -// val result = executor.write(createFeatureRequest) -// -// // then -// assertInstanceOf(SuccessResponse::class.java, result) -// assertEquals(1, result.rows.size) -// val row0 = result.rows[0].row -// assertEquals(XYZ_EXEC_CREATED, result.rows[0].op) -// assertNotNull(row0) -// assertEquals(row.id, row0.id) -// assertEquals(Flags.DEFAULT_FLAGS, row0.flags) -// assertEquals(0, row0.meta?.uid) -// assertEquals(XYZ_OP_CREATE.toShort(), row0.meta?.action) -// assertEquals(1, row0.meta?.version) -// assertEquals(-1906261745, row0.meta?.fnva1) -// } -//} diff --git a/here-naksha-lib-psql/src/jvmTest/kotlin/naksha/psql/PsqlInstanceTest.kt b/here-naksha-lib-psql/src/jvmTest/kotlin/naksha/psql/PsqlInstanceTest.kt new file mode 100644 index 000000000..a728ae20e --- /dev/null +++ b/here-naksha-lib-psql/src/jvmTest/kotlin/naksha/psql/PsqlInstanceTest.kt @@ -0,0 +1,32 @@ +package naksha.psql + +import kotlin.test.Test +import kotlin.test.assertEquals +import kotlin.test.assertFalse +import kotlin.test.assertNotNull + +class PsqlInstanceTest { + @Test + fun testParsing() { + val instance = PsqlInstance.get("jdbc:postgresql://localhost/unimap?user=postgres&password=secret") + assertNotNull(instance) + assertEquals("localhost", instance.host) + assertEquals(5432, instance.port) + assertEquals("unimap", instance.database) + assertEquals("postgres", 
instance.user) + assertEquals("secret", instance.password) + assertFalse(instance.readOnly) + } + + @Test + fun testDeclaring() { + val instance = PsqlInstance.get("localhost", 5432, "unimap", "postgres", "secret") + assertNotNull(instance) + assertEquals("localhost", instance.host) + assertEquals(5432, instance.port) + assertEquals("unimap", instance.database) + assertEquals("postgres", instance.user) + assertEquals("secret", instance.password) + assertFalse(instance.readOnly) + } +} \ No newline at end of file From 772fc5ead1cb598a927da8b528d9eb42caef44be Mon Sep 17 00:00:00 2001 From: Pawel Mazurek <52866094+cyberhead-pl@users.noreply.github.com> Date: Tue, 21 Jan 2025 10:27:18 +0100 Subject: [PATCH 2/2] jira ticket info for property read --- .../src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt b/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt index 9b5540a5f..1a8988a5b 100644 --- a/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt +++ b/here-naksha-lib-psql/src/commonTest/kotlin/naksha/psql/PgQueryBuilderTest.kt @@ -105,7 +105,7 @@ class PgQueryBuilderTest : PgTestBase() { ) } - // TODO FIXME uncomment me once property read is ready. + // TODO FIXME uncomment me once property read is ready (CASL-473). // @Test fun testReadWithAnd() { // given