@@ -0,0 +1,146 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.testing.embedded.indexing;

import com.fasterxml.jackson.core.type.TypeReference;
import org.apache.druid.indexing.common.task.TaskBuilder;
import org.apache.druid.java.util.common.jackson.JacksonUtils;
import org.apache.druid.query.http.ClientSqlQuery;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.TestDataSource;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.nested.NestedCommonFormatColumnFormatSpec;
import org.apache.druid.segment.nested.ObjectStorageEncoding;
import org.apache.druid.testing.embedded.EmbeddedBroker;
import org.apache.druid.testing.embedded.EmbeddedClusterApis;
import org.apache.druid.testing.embedded.EmbeddedCoordinator;
import org.apache.druid.testing.embedded.EmbeddedDruidCluster;
import org.apache.druid.testing.embedded.EmbeddedHistorical;
import org.apache.druid.testing.embedded.EmbeddedIndexer;
import org.apache.druid.testing.embedded.EmbeddedOverlord;
import org.apache.druid.testing.embedded.EmbeddedRouter;
import org.apache.druid.testing.embedded.junit5.EmbeddedClusterTestBase;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

/**
* Embedded tests for nested data ingested with different {@link NestedCommonFormatColumnFormatSpec} settings.
*/
public class NestedDataFormatsTest extends EmbeddedClusterTestBase
{
private final EmbeddedBroker broker = new EmbeddedBroker();
private final EmbeddedOverlord overlord = new EmbeddedOverlord();
private final EmbeddedCoordinator coordinator = new EmbeddedCoordinator();

private final String defaultFormat = "koala_default";

@Override
protected EmbeddedDruidCluster createCluster()
{
return EmbeddedDruidCluster.withEmbeddedDerbyAndZookeeper()
.useLatchableEmitter()
.useDefaultTimeoutForLatchableEmitter(60)
.addServer(overlord)
.addServer(coordinator)
.addServer(new EmbeddedIndexer())
.addServer(new EmbeddedHistorical())
.addServer(broker)
.addServer(new EmbeddedRouter());
}

@BeforeAll
protected void ingestWithDefaultFormat() throws Exception
{
final TaskBuilder.IndexParallel indexTask =
TaskBuilder.ofTypeIndexParallel()
.dataSource(defaultFormat)
.timestampColumn("timestamp")
.jsonInputFormat()
.inputSource(Resources.HttpData.kttm1Day())
.schemaDiscovery();

final String taskId = EmbeddedClusterApis.newTaskId(defaultFormat);
cluster.callApi().runTask(indexTask.withId(taskId), overlord);
cluster.callApi().waitForAllSegmentsToBeAvailable(defaultFormat, coordinator, broker);
}

@Test
public void test_objectStorageEncoding()
{
// Ingest the KTTM data without storing the raw Smile-encoded JSON objects, then compare against the defaultFormat datasource
NestedCommonFormatColumnFormatSpec spec =
NestedCommonFormatColumnFormatSpec.builder().setObjectStorageEncoding(ObjectStorageEncoding.NONE).build();
final TaskBuilder.IndexParallel indexTask =
TaskBuilder.ofTypeIndexParallel()
.dataSource(dataSource)
.timestampColumn("timestamp")
.jsonInputFormat()
.inputSource(Resources.HttpData.kttm1Day())
.schemaDiscovery()
.tuningConfig(t -> t.withIndexSpec(IndexSpec.builder().withAutoColumnFormatSpec(spec).build()));
final String taskId = EmbeddedClusterApis.newTaskId(dataSource);
cluster.callApi().runTask(indexTask.withId(taskId), overlord);
cluster.callApi().waitForAllSegmentsToBeAvailable(dataSource, coordinator, broker);

final String resultAsJson =
cluster.callApi().onAnyBroker(b -> b.submitSqlQuery(ClientSqlQuery.simple("select * from sys.segments")));
List<Map<String, Object>> result = JacksonUtils.readValue(
TestHelper.JSON_MAPPER,
resultAsJson.getBytes(StandardCharsets.UTF_8),
new TypeReference<>()
{
}
);
Map<String, Object> defaultFormatResult =
result.stream().filter(map -> defaultFormat.equals(map.get("datasource"))).findFirst().get();
Map<String, Object> noneObjectStorageFormatResult =
result.stream().filter(map -> dataSource.equals(map.get("datasource"))).findFirst().get();
// Ingesting without the raw Smile-encoded JSON storage works: same row count, with ~20% storage savings
Assertions.assertEquals(465_346, defaultFormatResult.get("num_rows"));
Assertions.assertEquals(53_000_804, defaultFormatResult.get("size"));
Assertions.assertEquals(465_346, noneObjectStorageFormatResult.get("num_rows"));
Assertions.assertEquals(41_938_750, noneObjectStorageFormatResult.get("size"));

// Test querying on a nested field works
final String groupByQuery = "select json_value(event, '$.type') as event_type, count(*) as total from %s group by 1 order by 2 desc, 1 asc limit 10";
final String queryResultDefaultFormat = cluster.callApi().runSql(groupByQuery, defaultFormat);
final String queryResultNoneObjectStorage = cluster.callApi().runSql(groupByQuery, dataSource);
Assertions.assertEquals(queryResultDefaultFormat, queryResultNoneObjectStorage);

// Reconstructing the JSON column works: the ordering of the fields changes, but all values are preserved.
final String scanQuery = "select event, to_json_string(agent) as agent from %s where json_value(event, '$.type') = 'PercentClear' and json_value(agent, '$.os') = 'Android' order by __time asc limit 1";
Contributor:
Please try to break the SQL string into multiple lines.
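One possible shape, as a sketch only (not the committed code), using plain string concatenation to keep each clause on its own line:

      final String scanQuery =
          "select event, to_json_string(agent) as agent from %s"
          + " where json_value(event, '$.type') = 'PercentClear'"
          + " and json_value(agent, '$.os') = 'Android'"
          + " order by __time asc limit 1";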

final String scanQueryResultDefaultFormat = cluster.callApi().runSql(scanQuery, defaultFormat);
final String scanQueryResultNoneObjectStorage = cluster.callApi().runSql(scanQuery, dataSource);
// CHECKSTYLE: text blocks not supported in current Checkstyle version
Contributor:
Let's consider updating the checkstyle version in that case.

Contributor (author):
Only later versions of Checkstyle support the """ text-block syntax (Java 15+), so I had to exclude this file since the error thrown was a parsing error. With the newer Checkstyle version a bunch of other things failed as well, so I'd prefer to handle the Checkstyle upgrade in a separate PR from this one.

Contributor:
Okay, thanks for the details.

I think you can also use // CHECKSTYLE.OFF to turn off Checkstyle without having to exclude the entire file in the pom; e.g. see the StringUtils class (a rough sketch of the pattern is below).
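For reference, the suppression-comment pattern looks roughly like this; the check name "Regexp" is taken from memory of StringUtils and is an assumption, so verify against the actual class:

      // CHECKSTYLE.OFF: Regexp
      // ... lines that the named check would otherwise flag ...
      // CHECKSTYLE.ON: Regexp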

Contributor (author):
It still complains with an error; I think parsing happens before the style check:

Failed to execute goal org.apache.maven.plugins:maven-checkstyle-plugin:3.0.0:checkstyle (default-cli) on project druid-embedded-tests: An error has occurred in Checkstyle report generation.: Failed during checkstyle configuration: Exception was thrown while processing /Users/cecemei/codespace/bitmap/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/NestedDataFormatsTest.java: IllegalStateException occurred while parsing file /Users/cecemei/codespace/bitmap/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/NestedDataFormatsTest.java. /Users/cecemei/codespace/bitmap/embedded-tests/src/test/java/org/apache/druid/testing/embedded/indexing/NestedDataFormatsTest.java:118:5: unexpected token: Assertions -> [Help 1]

Contributor:
I see, thanks for checking. Let's just exclude it in the pom until we upgrade the Checkstyle version in the follow-up PR.

Assertions.assertEquals(
"""
"{""type"":""PercentClear"",""percentage"":85}","{""type"":""Mobile Browser"",""category"":""Smartphone"",""browser"":""Chrome Mobile"",""browser_version"":""50.0.2661.89"",""os"":""Android"",""platform"":""Android""}"
""".trim(), scanQueryResultDefaultFormat);
Assertions.assertEquals(
"""
"{""percentage"":85,""type"":""PercentClear""}","{""browser"":""Chrome Mobile"",""browser_version"":""50.0.2661.89"",""category"":""Smartphone"",""os"":""Android"",""platform"":""Android"",""type"":""Mobile Browser""}"
""".trim(), scanQueryResultNoneObjectStorage);
}
}
@@ -62,9 +62,9 @@
* explicitly.
*
* @param <Self> Type of this builder itself
* @param <C> Type of tuning config used by this builder.
* @param <T> Type of task created by this builder.
* @param <CB> Type of tuning config builder
* @param <C> Type of tuning config used by this builder.
* @param <T> Type of task created by this builder.
* @param <CB> Type of tuning config builder
* @see #ofTypeIndex()
* @see #tuningConfig(Consumer) to specify the {@code tuningConfig}.
*/
@@ -286,6 +286,12 @@ public Self dimensions(String... dimensions)
return (Self) this;
}

public Self schemaDiscovery()
{
dataSchema.withDimensions(DimensionsSpec.builder().useSchemaDiscovery(true).build());
return (Self) this;
}

public Self metricAggregates(AggregatorFactory... aggregators)
{
dataSchema.withAggregators(aggregators);
4 changes: 2 additions & 2 deletions pom.xml
@@ -70,7 +70,7 @@
</scm>

<properties>
<java.version>11</java.version>
<java.version>17</java.version>
<maven.compiler.release>${java.version}</maven.compiler.release>
<project.build.resourceEncoding>UTF-8</project.build.resourceEncoding>
<aether.version>0.9.0.M2</aether.version>
@@ -1572,7 +1572,7 @@
<consoleOutput>true</consoleOutput>
<failsOnError>true</failsOnError>
<excludes>
*com/fasterxml/jackson/databind/*
*com/fasterxml/jackson/databind/*,**/NestedDataFormatsTest.java
Contributor:
Why is the new test excluded from checkstyle?

Contributor (author):
Parsing error (see the Checkstyle thread in the test file above).

</excludes>
</configuration>
<dependencies>
@@ -77,6 +77,11 @@ public ClientSqlQuery(
this.parameters = parameters;
}

public static ClientSqlQuery simple(String query)
{
return new ClientSqlQuery(query, null, false, false, false, Map.of(), List.of());
}

public String getQuery()
{
return query;
@@ -26,6 +26,9 @@
import org.junit.Assert;
import org.junit.Test;

import java.util.List;
import java.util.Map;

public class ClientSqlQueryTest
{
@Test
@@ -59,4 +62,11 @@ public void testSerdeWithParameters() throws JsonProcessingException
);
Assert.assertEquals(query, jsonMapper.readValue(jsonMapper.writeValueAsString(query), ClientSqlQuery.class));
}

@Test
public void testSimple()
{
final ClientSqlQuery query = ClientSqlQuery.simple("select 1");
Assert.assertEquals(new ClientSqlQuery("select 1", null, false, false, false, Map.of(), List.of()), query);
}
}
@@ -24,6 +24,7 @@
*/
public final class TestDataSource
{
public static final String PREFIX = "datasource";
public static final String WIKI = "wiki";
public static final String KOALA = "koala";
}
@@ -412,11 +412,11 @@ public SupervisorStatus getSupervisorStatus(String supervisorId)
// STATIC UTILITY METHODS

/**
* Creates a random datasource name prefixed with {@link TestDataSource#WIKI}.
* Creates a random datasource name prefixed with {@link TestDataSource#PREFIX}.
*/
public static String createTestDatasourceName()
{
return TestDataSource.WIKI + "_" + IdUtils.getRandomId();
return TestDataSource.PREFIX + "_" + IdUtils.getRandomId();
}

/**
@@ -131,6 +131,23 @@ public static HttpInputSource wikipedia1Day()
throw new RuntimeException(e);
}
}

public static HttpInputSource kttm1Day()
{
try {
return new HttpInputSource(
List.of(new URIBuilder("https://static.imply.io/example-data/kttm-nested-v2/kttm-nested-v2-2019-08-25.json.gz").build()),
null,
null,
null,
null,
new HttpInputSourceConfig(null, null)
);
}
catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
}

/**