@@ -52,6 +52,8 @@ public final class ColumnMetaDataLoader {

private static final String IS_NULLABLE = "IS_NULLABLE";

private static final String TYPE_NAME = "TYPE_NAME";

/**
* Load column meta data list.
*
@@ -66,6 +68,7 @@ public static Collection<ColumnMetaData> load(final Connection connection, final
Collection<String> primaryKeys = loadPrimaryKeys(connection, tableNamePattern);
List<String> columnNames = new ArrayList<>();
List<Integer> columnTypes = new ArrayList<>();
List<String> columnTypeNames = new ArrayList<>();
List<Boolean> primaryKeyFlags = new ArrayList<>();
List<Boolean> caseSensitiveFlags = new ArrayList<>();
List<Boolean> nullableFlags = new ArrayList<>();
@@ -75,6 +78,7 @@ public static Collection<ColumnMetaData> load(final Connection connection, final
if (Objects.equals(tableNamePattern, tableName)) {
String columnName = resultSet.getString(COLUMN_NAME);
columnTypes.add(resultSet.getInt(DATA_TYPE));
columnTypeNames.add(resultSet.getString(TYPE_NAME));
primaryKeyFlags.add(primaryKeys.contains(columnName));
nullableFlags.add("YES".equals(resultSet.getString(IS_NULLABLE)));
columnNames.add(columnName);
@@ -87,8 +91,10 @@ public static Collection<ColumnMetaData> load(final Connection connection, final
ResultSet resultSet = statement.executeQuery(emptyResultSQL)) {
for (int i = 0; i < columnNames.size(); i++) {
boolean generated = resultSet.getMetaData().isAutoIncrement(i + 1);

caseSensitiveFlags.add(resultSet.getMetaData().isCaseSensitive(resultSet.findColumn(columnNames.get(i))));
result.add(new ColumnMetaData(columnNames.get(i), columnTypes.get(i), primaryKeyFlags.get(i), generated, caseSensitiveFlags.get(i), true, false, nullableFlags.get(i)));
result.add(new ColumnMetaData(columnNames.get(i), columnTypes.get(i), primaryKeyFlags.get(i), generated, columnTypeNames.get(i), caseSensitiveFlags.get(i), true, false,
nullableFlags.get(i)));
}
} catch (final SQLException ex) {
log.error("Error occurred while loading column meta data, SQL: {}", emptyResultSQL, ex);
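Side note on the new TYPE_NAME lookup: JDBC reports PostgreSQL types such as jsonb, varbit and user-defined types only as java.sql.Types.OTHER in DATA_TYPE, so the textual TYPE_NAME column is what lets later code tell them apart. A minimal sketch of what DatabaseMetaData.getColumns exposes; the table and column names (t_order, payload) are invented for illustration and not part of this change:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;

final class TypeNameProbe {

    // Illustrative only: what getColumns() returns for a hypothetical column declared as "payload jsonb".
    static String loadTypeName(final Connection connection) throws SQLException {
        try (ResultSet columns = connection.getMetaData().getColumns(null, "public", "t_order", "payload")) {
            if (!columns.next()) {
                return null;
            }
            int dataType = columns.getInt("DATA_TYPE");       // Types.OTHER (1111) -- jsonb, varbit and UDTs all collapse to this
            String typeName = columns.getString("TYPE_NAME"); // "jsonb" -- the detail this PR starts propagating
            return Types.OTHER == dataType ? typeName : null;
        }
    }
}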
@@ -37,6 +37,8 @@ public final class ColumnMetaData {

private final boolean generated;

private final String typeName;

private final boolean caseSensitive;

private final boolean visible;
@@ -18,6 +18,7 @@
package org.apache.shardingsphere.database.connector.core.metadata.database.datatype;

import com.cedarsoftware.util.CaseInsensitiveMap;
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.datatype.DialectDataTypeOption;
import org.apache.shardingsphere.database.connector.core.type.DatabaseType;
import org.apache.shardingsphere.database.connector.core.type.DatabaseTypeRegistry;

@@ -41,7 +42,10 @@ public final class DataTypeLoader {
*/
public Map<String, Integer> load(final DatabaseMetaData databaseMetaData, final DatabaseType databaseType) throws SQLException {
Map<String, Integer> result = loadStandardDataTypes(databaseMetaData);
result.putAll(new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData().getDataTypeOption().getExtraDataTypes());
DialectDataTypeOption dataTypeOption = new DatabaseTypeRegistry(databaseType).getDialectDatabaseMetaData().getDataTypeOption();
result.putAll(dataTypeOption.getExtraDataTypes());
result.putAll(dataTypeOption.loadUDTTypes(databaseMetaData.getConnection()));

return result;
}

@@ -17,8 +17,11 @@

package org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.datatype;

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

@@ -76,4 +79,9 @@ public boolean isBinaryDataType(final int sqlType) {
return false;
}
}

@Override
public Map<String, Integer> loadUDTTypes(final Connection connection) throws SQLException {
return new HashMap<>();
}
}
@@ -17,6 +17,9 @@

package org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.datatype;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

@@ -65,4 +68,7 @@ public interface DialectDataTypeOption {
* @return is binary type or not
*/
boolean isBinaryDataType(int sqlType);

/**
* Load user-defined data types.
*
* @param connection connection
* @return loaded user-defined data type names and their JDBC types
* @throws SQLException SQL exception
*/
Map<String, Integer> loadUDTTypes(Connection connection) throws SQLException;

}
@@ -217,7 +217,7 @@ private ColumnMetaData loadColumnMetaData(final Collection<String> primaryKeys,
// TODO user defined collation which deterministic is false
boolean caseSensitive = true;
boolean isNullable = "YES".equals(resultSet.getString("is_nullable"));
return new ColumnMetaData(columnName, DataTypeRegistry.getDataType(getDatabaseType(), dataType).orElse(Types.OTHER), isPrimaryKey, generated, caseSensitive, true, false, isNullable);
return new ColumnMetaData(columnName, DataTypeRegistry.getDataType(getDatabaseType(), dataType).orElse(Types.OTHER), isPrimaryKey, generated, dataType, caseSensitive, true, false, isNullable);
}

private Map<String, Multimap<String, ConstraintMetaData>> loadConstraintMetaDataMap(final Connection connection, final Collection<String> schemaNames) throws SQLException {
@@ -21,7 +21,7 @@
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.datatype.DefaultDataTypeOption;
import org.apache.shardingsphere.database.connector.core.metadata.database.metadata.option.datatype.DialectDataTypeOption;

import java.sql.Types;
import java.sql.*;
import java.util.Map;
import java.util.Optional;

@@ -49,6 +49,8 @@ private static Map<String, Integer> setUpExtraDataTypes() {
result.put("REAL", Types.REAL);
result.put("BOOL", Types.BOOLEAN);
result.put("CHARACTER VARYING", Types.VARCHAR);
result.put("VARBIT", Types.OTHER); // Keep as OTHER but let column type detection handle it
result.put("BIT VARYING", Types.OTHER);
return result;
}

@@ -79,4 +81,32 @@ public boolean isStringDataType(final int sqlType) {
public boolean isBinaryDataType(final int sqlType) {
return delegate.isBinaryDataType(sqlType);
}

@Override
public Map<String, Integer> loadUDTTypes(final Connection connection) throws SQLException {
Map<String, Integer> result = new CaseInsensitiveMap<>();

String sql =
"SELECT\n" +
" t.typname AS udt_name,\n" +
" t.typtype AS udt_kind,\n" +
" n.nspname AS schema_name\n" +
"FROM pg_type t\n" +
" JOIN pg_namespace n ON n.oid = t.typnamespace\n" +
" LEFT JOIN pg_class c ON c.oid = t.typrelid\n" +
"WHERE\n" +
" n.nspname = 'public'\n" +
" AND t.typtype IN ('c', 'e', 'd') -- composite, enum and domain types\n" +
" AND (c.relkind IS NULL OR c.relkind = 'c') -- filter out table rowtypes (r)\n" +
"ORDER BY udt_name;";

try (
PreparedStatement ps = connection.prepareStatement(sql);
ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
result.put(rs.getString("udt_name"), Types.OTHER);
}
}
return result;
}
}
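For context, a rough sketch (not part of the change) of how the UDT names gathered by the pg_type query above reach callers once DataTypeLoader merges them into its result map. The enum name my_status and the way the loader is instantiated are illustrative assumptions:

import java.sql.Connection;
import java.sql.SQLException;
import java.util.Map;

import org.apache.shardingsphere.database.connector.core.metadata.database.datatype.DataTypeLoader;
import org.apache.shardingsphere.database.connector.core.type.DatabaseType;

final class UdtLookupSketch {

    // After this PR, a user-defined type such as the enum "my_status" appears in the
    // data type map (mapped to Types.OTHER) instead of being absent from it.
    static Integer resolveUdt(final Connection connection, final DatabaseType databaseType) throws SQLException {
        Map<String, Integer> dataTypes = new DataTypeLoader().load(connection.getMetaData(), databaseType);
        return dataTypes.get("my_status");
    }
}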
@@ -23,21 +23,9 @@
import org.apache.shardingsphere.database.protocol.binary.BinaryColumnType;
import org.apache.shardingsphere.database.protocol.postgresql.exception.PostgreSQLProtocolException;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.PostgreSQLTextValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLBitValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLBoolValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLDateValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLDoubleValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLFloatValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLIntValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLJsonValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLLongValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLNumericValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLTextArrayValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLTimeValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLTimestampValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLUnspecifiedValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLVarcharArrayValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLVarcharValueParser;
import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Types;
import java.util.HashMap;
@@ -142,7 +130,7 @@ public enum PostgreSQLColumnType implements BinaryColumnType {

CHAR_ARRAY(1002, new PostgreSQLVarcharValueParser()),

VARBIT(1562, new PostgreSQLVarcharValueParser()),
VARBIT(1562, new PostgreSQLVarBitValueParser()),

VARBIT_ARRAY(1563, new PostgreSQLVarcharValueParser()),

@@ -160,6 +148,8 @@ public enum PostgreSQLColumnType implements BinaryColumnType {

BOX(603, new PostgreSQLVarcharValueParser()),

JSONB(3802, new PostgreSQLJsonBValueParser()),

JSONB_ARRAY(3807, new PostgreSQLVarcharValueParser()),

JSON(114, new PostgreSQLJsonValueParser()),
@@ -200,6 +190,10 @@ public enum PostgreSQLColumnType implements BinaryColumnType {
JDBC_TYPE_AND_COLUMN_TYPE_MAP.put(Types.ARRAY, TEXT_ARRAY);
}

private static boolean isVarbit(final int jdbcType, final String columnTypeName) {
return Types.OTHER == jdbcType && ("varbit".equalsIgnoreCase(columnTypeName) || "bit varying".equalsIgnoreCase(columnTypeName));
}

/**
* Value of JDBC type.
*
@@ -228,6 +222,15 @@ public static PostgreSQLColumnType valueOfJDBCType(final int jdbcType, final Str
if (isUUID(jdbcType, columnTypeName)) {
return UUID;
}
if (isVarbit(jdbcType, columnTypeName)) {
return VARBIT;
}
if (isJSON(jdbcType, columnTypeName)) {
return JSON;
}
if (isJSONB(jdbcType, columnTypeName)) {
return JSONB;
}
return valueOfJDBCType(jdbcType);
}

@@ -279,4 +282,12 @@ public static PostgreSQLColumnType valueOf(final int value) {
}
throw new PostgreSQLProtocolException("Can not find value `%s` in PostgreSQL column type.", value);
}

private static boolean isJSON(final int jdbcType, final String columnTypeName) {
return Types.OTHER == jdbcType && "json".equalsIgnoreCase(columnTypeName);
}

private static boolean isJSONB(final int jdbcType, final String columnTypeName) {
return Types.OTHER == jdbcType && "jsonb".equalsIgnoreCase(columnTypeName);
}
}
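A small sketch of the new dispatch paths, written as plain assertions rather than a real test and assumed to sit in the same package as PostgreSQLColumnType so no extra import is needed: whenever the JDBC type is Types.OTHER, the column type name reported by the driver now decides the wire type.

import java.sql.Types;

final class ColumnTypeDispatchSketch {

    // Types.OTHER alone is ambiguous; the type name picks the concrete PostgreSQL column type.
    static void verifyDispatch() {
        assert PostgreSQLColumnType.valueOfJDBCType(Types.OTHER, "jsonb") == PostgreSQLColumnType.JSONB;
        assert PostgreSQLColumnType.valueOfJDBCType(Types.OTHER, "json") == PostgreSQLColumnType.JSON;
        assert PostgreSQLColumnType.valueOfJDBCType(Types.OTHER, "varbit") == PostgreSQLColumnType.VARBIT;
        assert PostgreSQLColumnType.valueOfJDBCType(Types.OTHER, "bit varying") == PostgreSQLColumnType.VARBIT;
    }
}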
@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl;

import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.PostgreSQLTextValueParser;
import org.apache.shardingsphere.infra.exception.external.sql.type.wrapper.SQLWrapperException;
import org.postgresql.util.PGobject;

import java.sql.SQLException;

/**
* JSONB value parser of PostgreSQL.
*/
public final class PostgreSQLJsonBValueParser implements PostgreSQLTextValueParser<PGobject> {

@Override
public PGobject parse(final String value) {
try {
PGobject result = new PGobject();
result.setType("jsonb");
result.setValue(value);
return result;
} catch (final SQLException ex) {
throw new SQLWrapperException(ex);
}
}
}
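Usage-wise, the PGobject built by this parser (and by PostgreSQLVarBitValueParser in the next file) can be handed straight to the PostgreSQL JDBC driver, which then transfers the value as jsonb rather than plain text. A hedged sketch; the t_order table and its payload column are invented for the example:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl.PostgreSQLJsonBValueParser;
import org.postgresql.util.PGobject;

final class JsonbBindSketch {

    // Parse the text-format parameter and bind it as a jsonb value.
    static void insertPayload(final Connection connection, final String jsonText) throws SQLException {
        PGobject payload = new PostgreSQLJsonBValueParser().parse(jsonText);
        try (PreparedStatement statement = connection.prepareStatement("INSERT INTO t_order (payload) VALUES (?)")) {
            statement.setObject(1, payload);
            statement.executeUpdate();
        }
    }
}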
@@ -0,0 +1,42 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.impl;

import org.apache.shardingsphere.database.protocol.postgresql.packet.command.query.extended.bind.protocol.text.PostgreSQLTextValueParser;
import org.apache.shardingsphere.infra.exception.external.sql.type.wrapper.SQLWrapperException;
import org.postgresql.util.PGobject;

import java.sql.SQLException;

/**
* Varbit value parser of PostgreSQL.
*/
public final class PostgreSQLVarBitValueParser implements PostgreSQLTextValueParser<PGobject> {

@Override
public PGobject parse(final String value) {
try {
PGobject result = new PGobject();
result.setType("varbit");
result.setValue(value);
return result;
} catch (final SQLException ex) {
throw new SQLWrapperException(ex);
}
}
}
@@ -38,6 +38,8 @@ public final class ShardingSphereColumn {

private final boolean generated;

private final String typeName;

private final boolean caseSensitive;

private final boolean visible;
@@ -47,7 +49,7 @@ public final class ShardingSphereColumn {
private final boolean nullable;

public ShardingSphereColumn(final ColumnMetaData columnMetaData) {
this(columnMetaData.getName(), columnMetaData.getDataType(), columnMetaData.isPrimaryKey(), columnMetaData.isGenerated(),
this(columnMetaData.getName(), columnMetaData.getDataType(), columnMetaData.isPrimaryKey(), columnMetaData.isGenerated(), columnMetaData.getTypeName(),
columnMetaData.isCaseSensitive(), columnMetaData.isVisible(), columnMetaData.isUnsigned(), columnMetaData.isNullable());
}
}
@@ -56,7 +56,8 @@ public Collection<ColumnMetaData> revise(final String tableName, final Collectio
}
String name = nameReviser.isPresent() ? nameReviser.get().revise(each.getName()) : each.getName();
boolean generated = generatedReviser.map(optional -> optional.revise(each)).orElseGet(each::isGenerated);
result.add(new ColumnMetaData(name, each.getDataType(), each.isPrimaryKey(), generated, each.isCaseSensitive(), each.isVisible(), each.isUnsigned(), each.isNullable()));
result.add(
new ColumnMetaData(name, each.getDataType(), each.isPrimaryKey(), generated, each.getTypeName(), each.isCaseSensitive(), each.isVisible(), each.isUnsigned(), each.isNullable()));
}
return result;
}
@@ -36,6 +36,8 @@ public final class YamlShardingSphereColumn implements YamlConfiguration {

private boolean generated;

private String typeName;

private boolean caseSensitive;

private boolean visible;
@@ -33,6 +33,7 @@ public YamlShardingSphereColumn swapToYamlConfiguration(final ShardingSphereColu
result.setDataType(data.getDataType());
result.setPrimaryKey(data.isPrimaryKey());
result.setGenerated(data.isGenerated());
result.setTypeName(data.getTypeName());
result.setCaseSensitive(data.isCaseSensitive());
result.setVisible(data.isVisible());
result.setUnsigned(data.isUnsigned());
@@ -43,6 +44,6 @@ public YamlShardingSphereColumn swapToYamlConfiguration(final ShardingSphereColu
@Override
public ShardingSphereColumn swapToObject(final YamlShardingSphereColumn yamlConfig) {
return new ShardingSphereColumn(yamlConfig.getName(), yamlConfig.getDataType(),
yamlConfig.isPrimaryKey(), yamlConfig.isGenerated(), yamlConfig.isCaseSensitive(), yamlConfig.isVisible(), yamlConfig.isUnsigned(), yamlConfig.isNullable());
yamlConfig.isPrimaryKey(), yamlConfig.isGenerated(), yamlConfig.getTypeName(), yamlConfig.isCaseSensitive(), yamlConfig.isVisible(), yamlConfig.isUnsigned(), yamlConfig.isNullable());
}
}
@@ -48,6 +48,8 @@ public final class PostgreSQLServerPreparedStatement implements ServerPreparedSt

private final List<PostgreSQLColumnType> parameterTypes;

private final List<String> parameterTypeNames;

private final List<Integer> actualParameterMarkerIndexes;

@Getter(AccessLevel.NONE)