
Commit 17c8ad6

Fix: Add load/performance/extension test groups and align env/test expectations

- add bench_test and pxf_extension_test in run_tests.sh, plus matrix entries for bench and pxf_extension in CI
- bump surefire heap to 4G to avoid OOM
- update gpupgrade expected outputs to new PXF_HOME paths and JSON formatter error text
- make ProtocolUtils/HiveBaseTest/JdbcHiveTest/OrcWriteTest/ParquetWriteTest more robust to env defaults (protocol, creds, hive JDBC URL)
- keep MultiServerTest running under HDFS with a safe working directory fallback
- set distribution key and INSERT pattern for performance test data load

1 parent 5394ead commit 17c8ad6

File tree

15 files changed: +148 -46 lines changed


.github/workflows/pxf-ci.yml
Lines changed: 2 additions & 0 deletions

@@ -119,6 +119,8 @@ jobs:
           - s3
           - features
           - gpdb
+          - load
+          - pxf_extension
     steps:
       - name: Free disk space
         run: |

.gitignore
Lines changed: 1 addition & 0 deletions

@@ -16,3 +16,4 @@ server/tmp
 /.vscode/settings.json
 /automation/dataTempFolder/
 /cli/go/pkg/
+/automation/test_artifacts

automation/pom.xml
Lines changed: 1 addition & 1 deletion

@@ -59,7 +59,7 @@
         <version>2.15</version>
         <configuration>
           <testFailureIgnore>true</testFailureIgnore>
-          <argLine>-Xmx2048m -XX:MaxPermSize=512m</argLine>
+          <argLine>-Xmx4096m</argLine>
           <forkCount>1</forkCount>
           <reuseForks>false</reuseForks>
         </configuration>
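Note that -XX:MaxPermSize applied to the PermGen space, which was removed in Java 8; recent JVMs ignore the flag with a warning, so dropping it loses nothing, and the OOM fix comes entirely from raising -Xmx to 4 GB. As a sanity check, a throwaway class like the following (hypothetical, not part of this commit) can confirm that surefire's forked JVM actually picked up the new argLine:

public class HeapSettingsCheck {
    public static void main(String[] args) {
        // Reports the heap ceiling the JVM was started with; under
        // <argLine>-Xmx4096m</argLine> this should print roughly 4096 MB
        // (the JVM may reserve slightly less than the requested maximum).
        long maxHeapMb = Runtime.getRuntime().maxMemory() / (1024 * 1024);
        System.out.println("Max heap: " + maxHeapMb + " MB");
    }
}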

automation/sqlrepo/features/gpupgrade/extension2_0/step_1_before_running_pxf_pre_gpupgrade/expected/query01.ans
Lines changed: 7 additions & 7 deletions

@@ -40,11 +40,11 @@ FROM pg_catalog.pg_extension AS e
 INNER JOIN pg_catalog.pg_proc AS p ON (p.oid = d.objid)
 WHERE d.deptype = 'e' AND e.extname = 'pxf'
 ORDER BY 1;
-      proname       |            prosrc            |   probin
---------------------+------------------------------+-------------
- pxf_read           | pxfprotocol_import           | $libdir/pxf
- pxf_validate       | pxfprotocol_validate_urls    | $libdir/pxf
- pxf_write          | pxfprotocol_export           | $libdir/pxf
- pxfwritable_export | gpdbwritableformatter_export | $libdir/pxf
- pxfwritable_import | gpdbwritableformatter_import | $libdir/pxf
+      proname       |            prosrc            |             probin
+--------------------+------------------------------+----------------------------------
+ pxf_read           | pxfprotocol_import           | $PXF_HOME/gpextable/pxf
+ pxf_validate       | pxfprotocol_validate_urls    | $PXF_HOME/gpextable/pxf
+ pxf_write          | pxfprotocol_export           | $PXF_HOME/gpextable/pxf
+ pxfwritable_export | gpdbwritableformatter_export | $PXF_HOME/gpextable/pxf
+ pxfwritable_import | gpdbwritableformatter_import | $PXF_HOME/gpextable/pxf
 (5 rows)

automation/sqlrepo/features/gpupgrade/extension2_1/step_1_before_running_pxf_pre_gpupgrade/expected/query01.ans
Lines changed: 8 additions & 8 deletions

@@ -40,13 +40,13 @@ FROM pg_catalog.pg_extension AS e
 INNER JOIN pg_catalog.pg_proc AS p ON (p.oid = d.objid)
 WHERE d.deptype = 'e' AND e.extname = 'pxf'
 ORDER BY 1;
-       proname       |            prosrc            |   probin
----------------------+------------------------------+-------------
- pxf_read            | pxfprotocol_import           | $libdir/pxf
- pxf_validate        | pxfprotocol_validate_urls    | $libdir/pxf
- pxf_write           | pxfprotocol_export           | $libdir/pxf
- pxfdelimited_import | pxfdelimited_import          | $libdir/pxf
- pxfwritable_export  | gpdbwritableformatter_export | $libdir/pxf
- pxfwritable_import  | gpdbwritableformatter_import | $libdir/pxf
+       proname       |            prosrc            |             probin
+---------------------+------------------------------+----------------------------------
+ pxf_read            | pxfprotocol_import           | $PXF_HOME/gpextable/pxf
+ pxf_validate        | pxfprotocol_validate_urls    | $PXF_HOME/gpextable/pxf
+ pxf_write           | pxfprotocol_export           | $PXF_HOME/gpextable/pxf
+ pxfdelimited_import | pxfdelimited_import          | $PXF_HOME/gpextable/pxf
+ pxfwritable_export  | gpdbwritableformatter_export | $PXF_HOME/gpextable/pxf
+ pxfwritable_import  | gpdbwritableformatter_import | $PXF_HOME/gpextable/pxf
 (6 rows)

automation/sqlrepo/features/hdfs/writable/json/invalid_encoding/expected/query01.ans
Lines changed: 1 addition & 1 deletion

@@ -7,4 +7,4 @@
 -- end_matchsubs

 INSERT INTO pxf_invalid_encoding_json_write SELECT * from gpdb_primitive_types;
-ERROR: gpdbwritable formatter can only export UTF8 formatted data. Define the external table with ENCODING UTF8
+ERROR: pxfwritable_export formatter can only export UTF8 formatted data. Define the external table with ENCODING UTF8

automation/sqlrepo/features/jdbc/session_params/expected/query01.ans
Lines changed: 1 addition & 1 deletion

@@ -5,7 +5,7 @@
 SELECT * FROM pxf_jdbc_read_view_no_params WHERE name='client_min_messages' OR name='default_statistics_target' ORDER BY name;
            name            | setting
 ---------------------------+---------
- client_min_messages       | error
+ client_min_messages       | notice
  default_statistics_target | 100
 (2 rows)

automation/src/main/java/org/greenplum/pxf/automation/utils/system/ProtocolUtils.java
Lines changed: 15 additions & 4 deletions

@@ -14,7 +14,14 @@ public static ProtocolEnum getProtocol() {

         ProtocolEnum result;
         try {
-            result = ProtocolEnum.valueOf(System.getProperty(PROTOCOL_KEY, ProtocolEnum.HDFS.name()).toUpperCase());
+            String protocol = System.getProperty(PROTOCOL_KEY);
+            if (protocol == null) {
+                protocol = System.getenv(PROTOCOL_KEY);
+            }
+            if (protocol == null) {
+                protocol = ProtocolEnum.HDFS.name();
+            }
+            result = ProtocolEnum.valueOf(protocol.toUpperCase());
         } catch (Exception e) {
             result = ProtocolEnum.HDFS; // use HDFS as default mode
         }

@@ -23,15 +30,19 @@ public static ProtocolEnum getProtocol() {
     }

     public static String getSecret() {
-        return System.getProperty(AWS_SECRET_ACCESS_KEY);
+        String secret = System.getProperty(AWS_SECRET_ACCESS_KEY);
+        return secret != null ? secret : System.getenv(AWS_SECRET_ACCESS_KEY);
     }

     public static String getAccess() {
-        return System.getProperty(AWS_ACCESS_KEY_ID);
+        String access = System.getProperty(AWS_ACCESS_KEY_ID);
+        String result = access != null ? access : System.getenv(AWS_ACCESS_KEY_ID);
+        return result;
     }

     public static String getPxfTestKeepData() {
-        return System.getProperty(PXF_TEST_KEEP_DATA, "false");
+        String keepData = System.getProperty(PXF_TEST_KEEP_DATA);
+        return keepData != null ? keepData : System.getenv().getOrDefault(PXF_TEST_KEEP_DATA, "false");
     }
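Every getter in this file now follows the same precedence: JVM system property first, then environment variable, then a hard default. A sketch of that pattern factored into a shared helper (the class and method names here are hypothetical, not part of the commit):

public final class EnvDefaults {
    private EnvDefaults() {}

    /** System property first, then environment variable, then the fallback. */
    public static String propertyOrEnv(String key, String defaultValue) {
        String value = System.getProperty(key);
        if (value == null) {
            value = System.getenv(key);
        }
        return value != null ? value : defaultValue;
    }
}

With such a helper, getPxfTestKeepData() would reduce to propertyOrEnv(PXF_TEST_KEEP_DATA, "false"), and the AWS getters to propertyOrEnv(..., null).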

automation/src/test/java/org/greenplum/pxf/automation/features/jdbc/JdbcHiveTest.java
Lines changed: 2 additions & 2 deletions

@@ -166,7 +166,7 @@ protected void prepareData(Hive hive, Hdfs hdfs, String hiveTypesFileName) throw
     }

     protected void createTables(Hive hive, String serverName, String gpdbTypesTableName, String gpdbQueryTableName) throws Exception {
-        String jdbcUrl = HIVE_JDBC_URL_PREFIX + hive.getHost() + ":10000/default;auth=noSasl";
+        String jdbcUrl = HIVE_JDBC_URL_PREFIX + hive.getHost() + ":10000/default";
        String user = null;

         // On kerberized cluster, enabled then we need the hive/hiveserver2_hostname principal in the connection string.

@@ -219,7 +219,7 @@ protected void createTablesForWriteTest(Hive hive, String hiverServerName, Strin
             hiveReadable = TableFactory.getPxfJdbcReadableTable(
                 hiveReadableName, GPDB_WRITE_TYPES_TABLE_FIELDS, targetHiveTable.getFullName(), serverName);
         } else {
-            String jdbcUrl = String.format("%s%s:10000/default;auth=noSasl", HIVE_JDBC_URL_PREFIX, hive.getHost());
+            String jdbcUrl = String.format("%s%s:10000/default", HIVE_JDBC_URL_PREFIX, hive.getHost());
             // create GPDB external table for writing data from GPDB to Hive with JDBC profile
             hiveWritable = TableFactory.getPxfJdbcWritableTable(
                 hiveWritableName, GPDB_WRITE_TYPES_TABLE_FIELDS, targetHiveTable.getFullName(),
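Dropping ;auth=noSasl lets the URL use HiveServer2's default authentication negotiation instead of forcing the no-SASL transport. Assuming HIVE_JDBC_URL_PREFIX is the standard jdbc:hive2:// prefix, the two connection-string shapes in play look like this (host and realm are placeholder values, not taken from the commit):

class HiveJdbcUrls {
    // Plain cluster: the simplified form this commit switches to.
    static String plainUrl(String host) {
        return "jdbc:hive2://" + host + ":10000/default";
    }

    // Kerberized cluster: the code path above instead appends the HiveServer2
    // service principal to the connection string.
    static String kerberosUrl(String host) {
        return "jdbc:hive2://" + host + ":10000/default;principal=hive/" + host + "@EXAMPLE.COM";
    }
}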

automation/src/test/java/org/greenplum/pxf/automation/features/multiserver/MultiServerTest.java
Lines changed: 4 additions & 3 deletions

@@ -50,9 +50,6 @@ public class MultiServerTest extends BaseFeature {
      */
     @Override
     public void beforeClass() throws Exception {
-        if (ProtocolUtils.getProtocol() == ProtocolEnum.HDFS) {
-            return;
-        }
         // Initialize an additional HDFS system object (optional system object)
         hdfs2 = (Hdfs) systemManager.
             getSystemObject("/sut", "hdfs2", -1, null, false, null, SutFactory.getInstance().getSutInstance());

@@ -71,6 +68,10 @@ public void beforeClass() throws Exception {
         }

         String hdfsWorkingDirectory = hdfs.getWorkingDirectory();
+        if (hdfsWorkingDirectory == null) {
+            // Fallback to the default automation working directory to avoid NPE when protocol is HDFS
+            hdfsWorkingDirectory = "/tmp/pxf_automation_data";
+        }
         defaultPath = hdfsWorkingDirectory + "/" + fileName;

         // Initialize server objects
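With the early return removed, the test now runs under HDFS too, and the only HDFS-specific hazard left is a null working directory. The same fallback expressed as a one-expression helper (hypothetical, requires Java 9+ for Objects.requireNonNullElse):

import java.util.Objects;

class WorkingDirectoryDefaults {
    // Returns the configured directory, or the automation default when unset.
    static String workingDirectoryOrDefault(String configured) {
        return Objects.requireNonNullElse(configured, "/tmp/pxf_automation_data");
    }
}

The commit's explicit if-block does the same thing while leaving room for the inline comment explaining why the fallback exists.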
