
Commit 3502662

Merge branch 'main' into replace_in_with_any_for_perf
2 parents: b599564 + 4e8e31c

File tree: 2 files changed (+528 −1 lines)

Lines changed: 399 additions & 0 deletions
@@ -0,0 +1,399 @@
package org.hypertrace.core.documentstore;

import static org.hypertrace.core.documentstore.utils.Utils.readFileFromResource;
import static org.junit.jupiter.api.Assertions.assertThrows;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.typesafe.config.ConfigFactory;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.hypertrace.core.documentstore.postgres.PostgresDatastore;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;

/**
 * Integration tests for write operations on flat PostgreSQL collections.
 *
 * <p>Flat collections are PostgreSQL tables with explicit column schemas (not JSONB-based nested
 * documents). This test class verifies that Collection interface write operations are rejected
 * (with UnsupportedOperationException) on such collections.
 */
@Testcontainers
public class FlatCollectionWriteTest {

  private static final Logger LOGGER = LoggerFactory.getLogger(FlatCollectionWriteTest.class);
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
  private static final String FLAT_COLLECTION_NAME = "myTestFlat";
  private static final String INSERT_STATEMENTS_FILE = "query/pg_flat_collection_insert.json";
  // Initial data has 10 rows (IDs 1-10)
  private static final int INITIAL_ROW_COUNT = 10;

  private static Datastore postgresDatastore;
  private static Collection flatCollection;
  private static GenericContainer<?> postgres;

  @BeforeAll
  public static void init() throws IOException {
    postgres =
        new GenericContainer<>(DockerImageName.parse("postgres:13.1"))
            .withEnv("POSTGRES_PASSWORD", "postgres")
            .withEnv("POSTGRES_USER", "postgres")
            .withExposedPorts(5432)
            .waitingFor(Wait.forListeningPort());
    postgres.start();

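    // Testcontainers maps container port 5432 to a random free host port; getMappedPort(5432)
    // returns that host port, so the JDBC URL below always points at the started container.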
    String postgresConnectionUrl =
        String.format("jdbc:postgresql://localhost:%s/", postgres.getMappedPort(5432));

    Map<String, String> postgresConfig = new HashMap<>();
    postgresConfig.put("url", postgresConnectionUrl);
    postgresConfig.put("user", "postgres");
    postgresConfig.put("password", "postgres");

    postgresDatastore =
        DatastoreProvider.getDatastore("Postgres", ConfigFactory.parseMap(postgresConfig));
    LOGGER.info("Postgres datastore initialized: {}", postgresDatastore.listCollections());

    createFlatCollectionSchema();
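    // DocumentType.FLAT selects the flat (explicit-column) collection implementation, as opposed
    // to the JSONB-based nested-document collections mentioned in the class Javadoc.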
    flatCollection =
        postgresDatastore.getCollectionForType(FLAT_COLLECTION_NAME, DocumentType.FLAT);
  }

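  // Creates the flat collection as a plain PostgreSQL table. The column set is assumed to mirror
  // the fields used by the INSERT statements in INSERT_STATEMENTS_FILE and by the tests below.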
  private static void createFlatCollectionSchema() {
    String createTableSQL =
        String.format(
            "CREATE TABLE \"%s\" ("
                + "\"id\" TEXT PRIMARY KEY,"
                + "\"item\" TEXT,"
                + "\"price\" INTEGER,"
                + "\"quantity\" INTEGER,"
                + "\"date\" TIMESTAMPTZ,"
                + "\"in_stock\" BOOLEAN,"
                + "\"tags\" TEXT[],"
                + "\"categoryTags\" TEXT[],"
                + "\"props\" JSONB,"
                + "\"sales\" JSONB,"
                + "\"numbers\" INTEGER[],"
                + "\"scores\" DOUBLE PRECISION[],"
                + "\"flags\" BOOLEAN[]"
                + ");",
            FLAT_COLLECTION_NAME);

    PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore;

    try (Connection connection = pgDatastore.getPostgresClient();
        PreparedStatement statement = connection.prepareStatement(createTableSQL)) {
      statement.execute();
      LOGGER.info("Created flat collection table: {}", FLAT_COLLECTION_NAME);
    } catch (Exception e) {
      LOGGER.error("Failed to create flat collection schema: {}", e.getMessage(), e);
    }
  }

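  // Loads the seed rows. The resource file is expected to be a JSON object with a "statements"
  // array of raw SQL INSERT strings, each of which is executed verbatim against the table.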
  private static void executeInsertStatements() {
    PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore;
    try {
      String jsonContent = readFileFromResource(INSERT_STATEMENTS_FILE).orElseThrow();
      JsonNode rootNode = OBJECT_MAPPER.readTree(jsonContent);
      JsonNode statementsNode = rootNode.get("statements");

      if (statementsNode == null || !statementsNode.isArray()) {
        throw new RuntimeException("Invalid JSON format: 'statements' array not found");
      }

      try (Connection connection = pgDatastore.getPostgresClient()) {
        for (JsonNode statementNode : statementsNode) {
          String statement = statementNode.asText().trim();
          if (!statement.isEmpty()) {
            try (PreparedStatement preparedStatement = connection.prepareStatement(statement)) {
              preparedStatement.executeUpdate();
            }
          }
        }
      }
      LOGGER.info("Inserted initial data into: {}", FLAT_COLLECTION_NAME);
    } catch (Exception e) {
      LOGGER.error("Failed to execute INSERT statements: {}", e.getMessage(), e);
    }
  }

  @BeforeEach
  public void setupData() {
    // Clear and repopulate with initial data before each test
    clearTable();
    executeInsertStatements();
  }

  private static void clearTable() {
    PostgresDatastore pgDatastore = (PostgresDatastore) postgresDatastore;
    String deleteSQL = String.format("DELETE FROM \"%s\"", FLAT_COLLECTION_NAME);
    try (Connection connection = pgDatastore.getPostgresClient();
        PreparedStatement statement = connection.prepareStatement(deleteSQL)) {
      statement.executeUpdate();
      LOGGER.info("Cleared table: {}", FLAT_COLLECTION_NAME);
    } catch (Exception e) {
      LOGGER.error("Failed to clear table: {}", e.getMessage(), e);
    }
  }

  @AfterEach
  public void cleanup() {
    // Data is cleared in @BeforeEach; kept as a placeholder for any additional per-test cleanup
  }

  @AfterAll
  public static void shutdown() {
    postgres.stop();
  }

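  // Every Collection write path below is expected to throw UnsupportedOperationException for a
  // DocumentType.FLAT collection; in these tests, data is written only via raw SQL in setup.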
  @Nested
  @DisplayName("Upsert Operations")
  class UpsertTests {

    @Test
    @DisplayName("Should throw UnsupportedOperationException for upsert")
    void testUpsertNewDocument() {
      ObjectNode objectNode = OBJECT_MAPPER.createObjectNode();
      objectNode.put("_id", 100);
      objectNode.put("item", "NewItem");
      objectNode.put("price", 99);
      Document document = new JSONDocument(objectNode);
      Key key = new SingleValueKey("default", "100");

      assertThrows(UnsupportedOperationException.class, () -> flatCollection.upsert(key, document));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for upsertAndReturn")
    void testUpsertAndReturn() {
      ObjectNode objectNode = OBJECT_MAPPER.createObjectNode();
      objectNode.put("_id", 100);
      objectNode.put("item", "NewItem");
      Document document = new JSONDocument(objectNode);
      Key key = new SingleValueKey("default", "100");

      assertThrows(
          UnsupportedOperationException.class, () -> flatCollection.upsertAndReturn(key, document));
    }
  }

  @Nested
  @DisplayName("Create Operations")
  class CreateTests {

    @Test
    @DisplayName("Should throw UnsupportedOperationException for create")
    void testCreate() {
      ObjectNode objectNode = OBJECT_MAPPER.createObjectNode();
      objectNode.put("_id", 300);
      objectNode.put("item", "Brand New Item");
      Document document = new JSONDocument(objectNode);
      Key key = new SingleValueKey("default", "300");

      assertThrows(UnsupportedOperationException.class, () -> flatCollection.create(key, document));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for createOrReplace")
    void testCreateOrReplace() {
      ObjectNode objectNode = OBJECT_MAPPER.createObjectNode();
      objectNode.put("_id", 200);
      objectNode.put("item", "NewMirror");
      Document document = new JSONDocument(objectNode);
      Key key = new SingleValueKey("default", "200");

      assertThrows(
          UnsupportedOperationException.class, () -> flatCollection.createOrReplace(key, document));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for createOrReplaceAndReturn")
    void testCreateOrReplaceAndReturn() {
      ObjectNode objectNode = OBJECT_MAPPER.createObjectNode();
      objectNode.put("_id", 200);
      objectNode.put("item", "NewMirror");
      Document document = new JSONDocument(objectNode);
      Key key = new SingleValueKey("default", "200");

      assertThrows(
          UnsupportedOperationException.class,
          () -> flatCollection.createOrReplaceAndReturn(key, document));
    }
  }

  @Nested
  @DisplayName("Bulk Operations")
  class BulkOperationTests {

    @Test
    @DisplayName("Should throw UnsupportedOperationException for bulkUpsert")
    void testBulkUpsert() {
      Map<Key, Document> bulkMap = new HashMap<>();
      ObjectNode node = OBJECT_MAPPER.createObjectNode();
      node.put("_id", 101);
      node.put("item", "BulkItem101");
      bulkMap.put(new SingleValueKey("default", "101"), new JSONDocument(node));

      assertThrows(UnsupportedOperationException.class, () -> flatCollection.bulkUpsert(bulkMap));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for bulkUpsertAndReturnOlderDocuments")
    void testBulkUpsertAndReturnOlderDocuments() {
      Map<Key, Document> bulkMap = new HashMap<>();
      ObjectNode node = OBJECT_MAPPER.createObjectNode();
      node.put("_id", 101);
      bulkMap.put(new SingleValueKey("default", "101"), new JSONDocument(node));

      assertThrows(
          UnsupportedOperationException.class,
          () -> flatCollection.bulkUpsertAndReturnOlderDocuments(bulkMap));
    }
  }

  @Nested
  @DisplayName("Delete Operations")
  class DeleteTests {

    @Test
    @DisplayName("Should throw UnsupportedOperationException for delete by key")
    void testDeleteByKey() {
      Key keyToDelete = new SingleValueKey("default", "1");
      assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(keyToDelete));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for delete by filter")
    void testDeleteByFilter() {
      Filter filter = Filter.eq("item", "Soap");
      assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(filter));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for delete by keys")
    void testDeleteByKeys() {
      Set<Key> keys =
          Set.of(new SingleValueKey("default", "1"), new SingleValueKey("default", "2"));
      assertThrows(UnsupportedOperationException.class, () -> flatCollection.delete(keys));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for deleteAll")
    void testDeleteAll() {
      assertThrows(UnsupportedOperationException.class, () -> flatCollection.deleteAll());
    }
  }

  @Nested
  @DisplayName("Update Operations")
  class UpdateTests {

    @Test
    @DisplayName("Should throw UnsupportedOperationException for update with condition")
    void testUpdateWithCondition() {
      Key key = new SingleValueKey("default", "1");
      ObjectNode updatedNode = OBJECT_MAPPER.createObjectNode();
      updatedNode.put("_id", 1);
      updatedNode.put("item", "Soap");
      Document document = new JSONDocument(updatedNode);
      Filter condition = Filter.eq("price", 10);

      assertThrows(
          UnsupportedOperationException.class,
          () -> flatCollection.update(key, document, condition));
    }
  }

  @Nested
  @DisplayName("Drop Operations")
  class DropTests {

    @Test
    @DisplayName("Should throw UnsupportedOperationException for drop")
    void testDrop() {
      assertThrows(UnsupportedOperationException.class, () -> flatCollection.drop());
    }
  }

  @Nested
  @DisplayName("Sub-Document Operations")
  class SubDocumentTests {

    @Test
    @DisplayName("Should throw UnsupportedOperationException for updateSubDoc")
    void testSubDocumentUpdate() {
      Key docKey = new SingleValueKey("default", "1");
      ObjectNode subDoc = OBJECT_MAPPER.createObjectNode();
      subDoc.put("newField", "newValue");
      Document subDocument = new JSONDocument(subDoc);

      assertThrows(
          UnsupportedOperationException.class,
          () -> flatCollection.updateSubDoc(docKey, "props.nested", subDocument));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for deleteSubDoc")
    void testSubDocumentDelete() {
      Key docKey = new SingleValueKey("default", "1");

      assertThrows(
          UnsupportedOperationException.class,
          () -> flatCollection.deleteSubDoc(docKey, "props.brand"));
    }

    @Test
    @DisplayName("Should throw UnsupportedOperationException for bulkUpdateSubDocs")
    void testBulkUpdateSubDocs() {
      Map<Key, Map<String, Document>> documents = new HashMap<>();
      Key key1 = new SingleValueKey("default", "1");
      Map<String, Document> subDocs1 = new HashMap<>();
      ObjectNode subDoc1 = OBJECT_MAPPER.createObjectNode();
      subDoc1.put("updated", true);
      subDocs1.put("props.status", new JSONDocument(subDoc1));
      documents.put(key1, subDocs1);

      assertThrows(
          UnsupportedOperationException.class, () -> flatCollection.bulkUpdateSubDocs(documents));
    }
  }

  @Nested
  @DisplayName("Bulk Array Value Operations")
  class BulkArrayValueOperationTests {

    @Test
    @DisplayName("Should throw UnsupportedOperationException for bulkOperationOnArrayValue")
    void testBulkOperationOnArrayValue() throws IOException {
      Set<Key> keys =
          Set.of(new SingleValueKey("default", "1"), new SingleValueKey("default", "2"));
      List<Document> subDocs =
          List.of(new JSONDocument("\"newTag1\""), new JSONDocument("\"newTag2\""));
      BulkArrayValueUpdateRequest request =
          new BulkArrayValueUpdateRequest(
              keys, "tags", BulkArrayValueUpdateRequest.Operation.SET, subDocs);

      assertThrows(
          UnsupportedOperationException.class,
          () -> flatCollection.bulkOperationOnArrayValue(request));
    }
  }
}

0 commit comments
