Unverified commit 57ac81aa, authored by Liang Zhang, committed by GitHub

Fix compile error for #5573 (#5575)

* fix compile error

* rename variable from logicSchema to schema
Parent 4d4c3463
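Context for the first hunks: ShadowRuleConfiguration now takes the shadow column and the production-to-shadow data source mapping as constructor arguments instead of setters, which is why the example module no longer compiled against the old API. A minimal sketch of the updated usage, assuming the same data source names as the example; the production/shadow DataSource instances are supplied by the caller (the example itself builds them with DataSourceUtil):

```java
import java.sql.SQLException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import javax.sql.DataSource;

import org.apache.shardingsphere.shadow.api.config.ShadowRuleConfiguration;
import org.apache.shardingsphere.shardingjdbc.api.ShardingSphereDataSourceFactory;

public final class ShadowDataSourceSketch {
    
    // The shadow column name and the production-to-shadow mapping are constructor arguments now;
    // the former setColumn(...) / setShadowMappings(...) setters are gone.
    public static DataSource create(final DataSource productionDataSource, final DataSource shadowDataSource) throws SQLException {
        ShadowRuleConfiguration shadowRuleConfig = new ShadowRuleConfiguration("shadow", Collections.singletonMap("ds", "ds_0"));
        Map<String, DataSource> dataSourceMap = new HashMap<>();
        dataSourceMap.put("ds", productionDataSource);
        dataSourceMap.put("ds_0", shadowDataSource);
        return ShardingSphereDataSourceFactory.createDataSource(dataSourceMap, Collections.singleton(shadowRuleConfig), null);
    }
}
```

As in the example configuration below, null is passed where no extra properties are supplied.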
......@@ -17,9 +17,9 @@
package org.apache.shardingsphere.example.shadow.table.raw.jdbc.config;
import org.apache.shardingsphere.api.config.shadow.ShadowRuleConfiguration;
import org.apache.shardingsphere.example.config.ExampleConfiguration;
import org.apache.shardingsphere.example.core.api.DataSourceUtil;
import org.apache.shardingsphere.shadow.api.config.ShadowRuleConfiguration;
import org.apache.shardingsphere.shardingjdbc.api.ShardingSphereDataSourceFactory;
import javax.sql.DataSource;
......@@ -28,20 +28,18 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
public class ShadowDatabasesConfiguration implements ExampleConfiguration {
public final class ShadowDatabasesConfiguration implements ExampleConfiguration {
@Override
public DataSource getDataSource() {
ShadowRuleConfiguration shadowRuleConfiguration = new ShadowRuleConfiguration();
shadowRuleConfiguration.setColumn("shadow");
shadowRuleConfiguration.setShadowMappings(Collections.singletonMap("ds", "ds_0"));
ShadowRuleConfiguration shadowRuleConfiguration = new ShadowRuleConfiguration("shadow", Collections.singletonMap("ds", "ds_0"));
Map<String, DataSource> dataSourceMap = new HashMap<>();
dataSourceMap.put("ds", DataSourceUtil.createDataSource("demo_ds"));
dataSourceMap.put("ds_0", DataSourceUtil.createDataSource("shadow_demo_ds"));
try {
return ShardingSphereDataSourceFactory.createDataSource(dataSourceMap, Collections.singleton(shadowRuleConfiguration), null);
} catch (SQLException e) {
e.printStackTrace();
} catch (final SQLException ex) {
ex.printStackTrace();
return null;
}
}
......
......@@ -30,9 +30,9 @@ import java.util.Map;
@Getter
public final class ShadowRuleConfiguration implements RuleConfiguration {
private String column;
private final String column;
private Map<String, String> shadowMappings;
private final Map<String, String> shadowMappings;
public ShadowRuleConfiguration(final String column, final Map<String, String> shadowMappings) {
Preconditions.checkArgument(!Strings.isNullOrEmpty(column), "Column is required.");
......
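Since both fields are now final and the constructor validates its arguments (the Guava Preconditions check in the hunk above), an invalid configuration fails at construction time. A small illustrative sketch, assuming this is the same class the example imports from org.apache.shardingsphere.shadow.api.config:

```java
import java.util.Collections;

import org.apache.shardingsphere.shadow.api.config.ShadowRuleConfiguration;

public final class ShadowRuleConfigurationSketch {
    
    public static void main(final String[] args) {
        // Valid: both required arguments are supplied; @Getter provides the accessors.
        ShadowRuleConfiguration valid = new ShadowRuleConfiguration("shadow", Collections.singletonMap("ds", "ds_0"));
        System.out.println(valid.getColumn());
        // Invalid: Preconditions.checkArgument rejects an empty column with "Column is required."
        try {
            new ShadowRuleConfiguration("", Collections.singletonMap("ds", "ds_0"));
        } catch (final IllegalArgumentException ex) {
            System.out.println(ex.getMessage());
        }
    }
}
```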
......@@ -48,25 +48,25 @@ public final class DatabaseCommunicationEngineFactory {
/**
* Create new instance of text protocol backend handler.
*
* @param logicSchema logic schema
* @param schema ShardingSphere schema
* @param sql SQL to be executed
* @param backendConnection backend connection
* @return instance of text protocol backend handler
*/
public DatabaseCommunicationEngine newTextProtocolInstance(final ShardingSphereSchema logicSchema, final String sql, final BackendConnection backendConnection) {
return new JDBCDatabaseCommunicationEngine(logicSchema, sql, new JDBCExecuteEngine(backendConnection, new StatementExecutorWrapper(logicSchema)));
public DatabaseCommunicationEngine newTextProtocolInstance(final ShardingSphereSchema schema, final String sql, final BackendConnection backendConnection) {
return new JDBCDatabaseCommunicationEngine(schema, sql, new JDBCExecuteEngine(backendConnection, new StatementExecutorWrapper(schema)));
}
/**
* Create new instance of text protocol backend handler.
*
* @param logicSchema logic schema
* @param schema ShardingSphere schema
* @param sql SQL to be executed
* @param parameters SQL parameters
* @param backendConnection backend connection
* @return instance of text protocol backend handler
*/
public DatabaseCommunicationEngine newBinaryProtocolInstance(final ShardingSphereSchema logicSchema, final String sql, final List<Object> parameters, final BackendConnection backendConnection) {
return new JDBCDatabaseCommunicationEngine(logicSchema, sql, new JDBCExecuteEngine(backendConnection, new PreparedStatementExecutorWrapper(logicSchema, parameters)));
public DatabaseCommunicationEngine newBinaryProtocolInstance(final ShardingSphereSchema schema, final String sql, final List<Object> parameters, final BackendConnection backendConnection) {
return new JDBCDatabaseCommunicationEngine(schema, sql, new JDBCExecuteEngine(backendConnection, new PreparedStatementExecutorWrapper(schema, parameters)));
}
}
......@@ -56,7 +56,7 @@ import java.util.Optional;
@RequiredArgsConstructor
public final class JDBCDatabaseCommunicationEngine implements DatabaseCommunicationEngine {
private final ShardingSphereSchema logicSchema;
private final ShardingSphereSchema schema;
private final String sql;
......@@ -88,10 +88,7 @@ public final class JDBCDatabaseCommunicationEngine implements DatabaseCommunicat
return new ErrorResponse(new TableModifyInTransactionException(getTableName(sqlStatementContext)));
}
response = executeEngine.execute(executionContext);
// TODO refresh non-sharding table meta data
if (logicSchema instanceof ShardingSphereSchema) {
logicSchema.refreshTableMetaData(executionContext.getSqlStatementContext());
}
schema.refreshTableMetaData(executionContext.getSqlStatementContext());
return merge(executionContext.getSqlStatementContext());
}
......@@ -126,13 +123,13 @@ public final class JDBCDatabaseCommunicationEngine implements DatabaseCommunicat
}
private boolean isNeedAccumulate(final SQLStatementContext sqlStatementContext) {
Optional<DataNodeRoutedRule> dataNodeRoutedRule = logicSchema.getRules().stream().filter(each -> each instanceof DataNodeRoutedRule).findFirst().map(rule -> (DataNodeRoutedRule) rule);
Optional<DataNodeRoutedRule> dataNodeRoutedRule = schema.getRules().stream().filter(each -> each instanceof DataNodeRoutedRule).findFirst().map(rule -> (DataNodeRoutedRule) rule);
return dataNodeRoutedRule.isPresent() && dataNodeRoutedRule.get().isNeedAccumulate(sqlStatementContext.getTablesContext().getTableNames());
}
private MergedResult mergeQuery(final SQLStatementContext sqlStatementContext, final List<QueryResult> queryResults) throws SQLException {
MergeEngine mergeEngine = new MergeEngine(ShardingSphereSchemas.getInstance().getDatabaseType(),
logicSchema.getMetaData().getSchema().getConfiguredSchemaMetaData(), ShardingProxyContext.getInstance().getProperties(), logicSchema.getRules());
schema.getMetaData().getSchema().getConfiguredSchemaMetaData(), ShardingProxyContext.getInstance().getProperties(), schema.getRules());
return mergeEngine.merge(queryResults, sqlStatementContext);
}
......
......@@ -61,7 +61,7 @@ public final class BackendConnection implements JDBCExecutionConnection, AutoClo
private volatile String schemaName;
private ShardingSphereSchema logicSchema;
private ShardingSphereSchema schema;
private TransactionType transactionType;
......@@ -112,7 +112,7 @@ public final class BackendConnection implements JDBCExecutionConnection, AutoClo
}
/**
* Change logic schema of current channel.
* Change schema of current channel.
*
* @param schemaName schema name
*/
......@@ -121,7 +121,7 @@ public final class BackendConnection implements JDBCExecutionConnection, AutoClo
throw new ShardingSphereException("Failed to switch schema, please terminate current transaction.");
}
this.schemaName = schemaName;
this.logicSchema = ShardingSphereSchemas.getInstance().getSchema(schemaName);
this.schema = ShardingSphereSchemas.getInstance().getSchema(schemaName);
}
@SneakyThrows(InterruptedException.class)
......@@ -171,7 +171,7 @@ public final class BackendConnection implements JDBCExecutionConnection, AutoClo
}
private List<Connection> getConnectionsWithoutTransaction(final String dataSourceName, final int connectionSize, final ConnectionMode connectionMode) throws SQLException {
Preconditions.checkNotNull(logicSchema, "current logic schema is null");
Preconditions.checkNotNull(schema, "current schema is null");
List<Connection> result = getConnectionFromUnderlying(dataSourceName, connectionSize, connectionMode);
synchronized (cachedConnections) {
cachedConnections.putAll(dataSourceName, result);
......@@ -180,7 +180,7 @@ public final class BackendConnection implements JDBCExecutionConnection, AutoClo
}
private List<Connection> createNewConnections(final String dataSourceName, final int connectionSize, final ConnectionMode connectionMode) throws SQLException {
Preconditions.checkNotNull(logicSchema, "current logic schema is null");
Preconditions.checkNotNull(schema, "current schema is null");
List<Connection> result = getConnectionFromUnderlying(dataSourceName, connectionSize, connectionMode);
for (Connection each : result) {
replayMethodsInvocation(each);
......@@ -189,7 +189,7 @@ public final class BackendConnection implements JDBCExecutionConnection, AutoClo
}
private List<Connection> getConnectionFromUnderlying(final String dataSourceName, final int connectionSize, final ConnectionMode connectionMode) throws SQLException {
return logicSchema.getBackendDataSource().getConnections(dataSourceName, connectionSize, connectionMode, transactionType);
return schema.getBackendDataSource().getConnections(dataSourceName, connectionSize, connectionMode, transactionType);
}
@Override
......
......@@ -39,8 +39,8 @@ public final class BackendTransactionManager implements TransactionManager {
connection = backendConnection;
transactionType = connection.getTransactionType();
localTransactionManager = new LocalTransactionManager(backendConnection);
shardingTransactionManager = null == connection.getLogicSchema() ? null
: connection.getLogicSchema().getBackendDataSource().getShardingTransactionManagerEngine().getTransactionManager(transactionType);
shardingTransactionManager = null == connection.getSchema() ? null
: connection.getSchema().getBackendDataSource().getShardingTransactionManagerEngine().getTransactionManager(transactionType);
}
@Override
......
......@@ -110,7 +110,7 @@ public final class JDBCExecuteEngine implements SQLExecuteEngine {
}
private SQLExecutorCallback<ExecuteResponse> getSQLExecutorCallback(final ProxySQLExecutorCallback callback) {
Map<ShardingSphereRule, RuleProxySQLExecutorCallback> callbackMap = OrderedSPIRegistry.getRegisteredServices(backendConnection.getLogicSchema().getRules(), RuleProxySQLExecutorCallback.class);
Map<ShardingSphereRule, RuleProxySQLExecutorCallback> callbackMap = OrderedSPIRegistry.getRegisteredServices(backendConnection.getSchema().getRules(), RuleProxySQLExecutorCallback.class);
return callbackMap.isEmpty() ? callback : callbackMap.values().iterator().next();
}
......
......@@ -102,7 +102,7 @@ public final class ProxySQLExecutorCallback extends DefaultSQLExecutorCallback<E
private List<QueryHeader> getQueryHeaders(final ProjectionsContext projectionsContext, final ResultSetMetaData resultSetMetaData) throws SQLException {
List<QueryHeader> result = new LinkedList<>();
for (int columnIndex = 1; columnIndex <= projectionsContext.getExpandProjections().size(); columnIndex++) {
result.add(new QueryHeader(projectionsContext, resultSetMetaData, backendConnection.getLogicSchema(), columnIndex));
result.add(new QueryHeader(projectionsContext, resultSetMetaData, backendConnection.getSchema(), columnIndex));
}
return result;
}
......@@ -110,7 +110,7 @@ public final class ProxySQLExecutorCallback extends DefaultSQLExecutorCallback<E
private List<QueryHeader> getQueryHeaders(final ResultSetMetaData resultSetMetaData) throws SQLException {
List<QueryHeader> result = new LinkedList<>();
for (int columnIndex = 1; columnIndex <= resultSetMetaData.getColumnCount(); columnIndex++) {
result.add(new QueryHeader(resultSetMetaData, backendConnection.getLogicSchema(), columnIndex));
result.add(new QueryHeader(resultSetMetaData, backendConnection.getSchema(), columnIndex));
}
return result;
}
......
......@@ -19,7 +19,6 @@ package org.apache.shardingsphere.shardingproxy.backend.communication.jdbc.wrapp
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.shardingproxy.backend.schema.ShardingSphereSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.impl.ShardingSphereSchema;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.statement.CommonSQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.statement.SQLStatement;
......@@ -52,38 +51,29 @@ public final class PreparedStatementExecutorWrapper implements JDBCExecutorWrapp
private static final ShardingProxyContext SHARDING_PROXY_CONTEXT = ShardingProxyContext.getInstance();
private final ShardingSphereSchema logicSchema;
private final ShardingSphereSchema schema;
private final List<Object> parameters;
@SuppressWarnings("unchecked")
@Override
public ExecutionContext route(final String sql) {
if (logicSchema instanceof ShardingSphereSchema) {
return doShardingRoute(sql);
SQLStatement sqlStatement = schema.getSqlParserEngine().parse(sql, true);
Collection<ShardingSphereRule> rules = schema.getRules();
if (rules.isEmpty()) {
return new ExecutionContext(
new CommonSQLStatementContext(sqlStatement), new ExecutionUnit(schema.getDataSources().keySet().iterator().next(), new SQLUnit(sql, Collections.emptyList())));
}
return doTransparentRoute(sql);
}
private ExecutionContext doShardingRoute(final String sql) {
Collection<ShardingSphereRule> rules = logicSchema.getRules();
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(sql, true);
RouteContext routeContext = new DataNodeRouter(logicSchema.getMetaData(), SHARDING_PROXY_CONTEXT.getProperties(), rules).route(sqlStatement, sql, parameters);
SQLRewriteResult sqlRewriteResult = new SQLRewriteEntry(logicSchema.getMetaData().getSchema().getConfiguredSchemaMetaData(),
RouteContext routeContext = new DataNodeRouter(schema.getMetaData(), SHARDING_PROXY_CONTEXT.getProperties(), rules).route(sqlStatement, sql, parameters);
SQLRewriteResult sqlRewriteResult = new SQLRewriteEntry(schema.getMetaData().getSchema().getConfiguredSchemaMetaData(),
SHARDING_PROXY_CONTEXT.getProperties(), rules).rewrite(sql, new ArrayList<>(parameters), routeContext);
return new ExecutionContext(routeContext.getSqlStatementContext(), ExecutionContextBuilder.build(logicSchema.getMetaData(), sqlRewriteResult));
}
@SuppressWarnings("unchecked")
private ExecutionContext doTransparentRoute(final String sql) {
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(sql, false);
return new ExecutionContext(
new CommonSQLStatementContext(sqlStatement), new ExecutionUnit(logicSchema.getDataSources().keySet().iterator().next(), new SQLUnit(sql, Collections.emptyList())));
return new ExecutionContext(routeContext.getSqlStatementContext(), ExecutionContextBuilder.build(schema.getMetaData(), sqlRewriteResult));
}
@Override
public ExecuteGroupEngine getExecuteGroupEngine() {
int maxConnectionsSizePerQuery = ShardingProxyContext.getInstance().getProperties().<Integer>getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY);
return new PreparedStatementExecuteGroupEngine(maxConnectionsSizePerQuery, logicSchema.getRules());
return new PreparedStatementExecuteGroupEngine(maxConnectionsSizePerQuery, schema.getRules());
}
@Override
......
......@@ -19,7 +19,6 @@ package org.apache.shardingsphere.shardingproxy.backend.communication.jdbc.wrapp
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.shardingproxy.backend.schema.ShardingSphereSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.impl.ShardingSphereSchema;
import org.apache.shardingsphere.shardingproxy.context.ShardingProxyContext;
import org.apache.shardingsphere.sql.parser.binder.statement.CommonSQLStatementContext;
import org.apache.shardingsphere.sql.parser.sql.statement.SQLStatement;
......@@ -49,36 +48,27 @@ public final class StatementExecutorWrapper implements JDBCExecutorWrapper {
private static final ShardingProxyContext SHARDING_PROXY_CONTEXT = ShardingProxyContext.getInstance();
private final ShardingSphereSchema logicSchema;
private final ShardingSphereSchema schema;
@SuppressWarnings("unchecked")
@Override
public ExecutionContext route(final String sql) {
if (logicSchema instanceof ShardingSphereSchema) {
return doShardingRoute(sql);
SQLStatement sqlStatement = schema.getSqlParserEngine().parse(sql, false);
Collection<ShardingSphereRule> rules = schema.getRules();
if (rules.isEmpty()) {
return new ExecutionContext(
new CommonSQLStatementContext(sqlStatement), new ExecutionUnit(schema.getDataSources().keySet().iterator().next(), new SQLUnit(sql, Collections.emptyList())));
}
return doTransparentRoute(sql);
}
private ExecutionContext doShardingRoute(final String sql) {
Collection<ShardingSphereRule> rules = logicSchema.getRules();
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(sql, false);
RouteContext routeContext = new DataNodeRouter(logicSchema.getMetaData(), SHARDING_PROXY_CONTEXT.getProperties(), rules).route(sqlStatement, sql, Collections.emptyList());
SQLRewriteResult sqlRewriteResult = new SQLRewriteEntry(logicSchema.getMetaData().getSchema().getConfiguredSchemaMetaData(),
RouteContext routeContext = new DataNodeRouter(schema.getMetaData(), SHARDING_PROXY_CONTEXT.getProperties(), rules).route(sqlStatement, sql, Collections.emptyList());
SQLRewriteResult sqlRewriteResult = new SQLRewriteEntry(schema.getMetaData().getSchema().getConfiguredSchemaMetaData(),
SHARDING_PROXY_CONTEXT.getProperties(), rules).rewrite(sql, Collections.emptyList(), routeContext);
return new ExecutionContext(routeContext.getSqlStatementContext(), ExecutionContextBuilder.build(logicSchema.getMetaData(), sqlRewriteResult));
}
@SuppressWarnings("unchecked")
private ExecutionContext doTransparentRoute(final String sql) {
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(sql, false);
return new ExecutionContext(
new CommonSQLStatementContext(sqlStatement), new ExecutionUnit(logicSchema.getDataSources().keySet().iterator().next(), new SQLUnit(sql, Collections.emptyList())));
return new ExecutionContext(routeContext.getSqlStatementContext(), ExecutionContextBuilder.build(schema.getMetaData(), sqlRewriteResult));
}
@Override
public ExecuteGroupEngine getExecuteGroupEngine() {
int maxConnectionsSizePerQuery = ShardingProxyContext.getInstance().getProperties().<Integer>getValue(ConfigurationPropertyKey.MAX_CONNECTIONS_SIZE_PER_QUERY);
return new StatementExecuteGroupEngine(maxConnectionsSizePerQuery, logicSchema.getRules());
return new StatementExecuteGroupEngine(maxConnectionsSizePerQuery, schema.getRules());
}
@Override
......
......@@ -59,17 +59,17 @@ public final class QueryHeader {
private final boolean autoIncrement;
public QueryHeader(final ResultSetMetaData resultSetMetaData, final ShardingSphereSchema logicSchema, final int columnIndex) throws SQLException {
this(resultSetMetaData, logicSchema, resultSetMetaData.getColumnName(columnIndex), columnIndex);
public QueryHeader(final ResultSetMetaData resultSetMetaData, final ShardingSphereSchema schema, final int columnIndex) throws SQLException {
this(resultSetMetaData, schema, resultSetMetaData.getColumnName(columnIndex), columnIndex);
}
public QueryHeader(final ProjectionsContext projectionsContext, final ResultSetMetaData resultSetMetaData, final ShardingSphereSchema logicSchema, final int columnIndex) throws SQLException {
this(resultSetMetaData, logicSchema, getColumnName(projectionsContext, resultSetMetaData, columnIndex), columnIndex);
public QueryHeader(final ProjectionsContext projectionsContext, final ResultSetMetaData resultSetMetaData, final ShardingSphereSchema schema, final int columnIndex) throws SQLException {
this(resultSetMetaData, schema, getColumnName(projectionsContext, resultSetMetaData, columnIndex), columnIndex);
}
private QueryHeader(final ResultSetMetaData resultSetMetaData, final ShardingSphereSchema logicSchema, final String columnName, final int columnIndex) throws SQLException {
private QueryHeader(final ResultSetMetaData resultSetMetaData, final ShardingSphereSchema schema, final String columnName, final int columnIndex) throws SQLException {
this.columnName = columnName;
schema = logicSchema.getName();
this.schema = schema.getName();
columnLabel = resultSetMetaData.getColumnLabel(columnIndex);
columnLength = resultSetMetaData.getColumnDisplaySize(columnIndex);
columnType = resultSetMetaData.getColumnType(columnIndex);
......@@ -78,10 +78,10 @@ public final class QueryHeader {
notNull = resultSetMetaData.isNullable(columnIndex) == ResultSetMetaData.columnNoNulls;
autoIncrement = resultSetMetaData.isAutoIncrement(columnIndex);
String actualTableName = resultSetMetaData.getTableName(columnIndex);
Optional<DataNodeRoutedRule> dataNodeRoutedRule = logicSchema.getRules().stream().filter(each -> each instanceof DataNodeRoutedRule).findFirst().map(rule -> (DataNodeRoutedRule) rule);
Optional<DataNodeRoutedRule> dataNodeRoutedRule = schema.getRules().stream().filter(each -> each instanceof DataNodeRoutedRule).findFirst().map(rule -> (DataNodeRoutedRule) rule);
if (null != actualTableName && dataNodeRoutedRule.isPresent()) {
table = dataNodeRoutedRule.get().findLogicTableByActualTable(actualTableName).orElse("");
TableMetaData tableMetaData = logicSchema.getMetaData().getSchema().getConfiguredSchemaMetaData().get(table);
TableMetaData tableMetaData = schema.getMetaData().getSchema().getConfiguredSchemaMetaData().get(table);
primaryKey = null != tableMetaData && tableMetaData.getColumns().get(resultSetMetaData.getColumnName(columnIndex).toLowerCase()).isPrimaryKey();
} else {
table = actualTableName;
......
......@@ -45,13 +45,13 @@ public final class UnicastBackendHandler implements TextProtocolBackendHandler {
@Override
public BackendResponse execute() {
// TODO we should remove set default logicSchema after parser can recognize all DAL broadcast SQL.
ShardingSphereSchema logicSchema = backendConnection.getLogicSchema();
if (null == logicSchema) {
logicSchema = ShardingSphereSchemas.getInstance().getSchemas().values().iterator().next();
backendConnection.setCurrentSchema(logicSchema.getName());
// TODO we should remove set default ShardingSphere schema after parser can recognize all DAL broadcast SQL.
ShardingSphereSchema schema = backendConnection.getSchema();
if (null == schema) {
schema = ShardingSphereSchemas.getInstance().getSchemas().values().iterator().next();
backendConnection.setCurrentSchema(schema.getName());
}
databaseCommunicationEngine = databaseCommunicationEngineFactory.newTextProtocolInstance(logicSchema, sql, backendConnection);
databaseCommunicationEngine = databaseCommunicationEngineFactory.newTextProtocolInstance(schema, sql, backendConnection);
return databaseCommunicationEngine.execute();
}
......
......@@ -45,10 +45,10 @@ public final class QueryBackendHandler implements TextProtocolBackendHandler {
@Override
public BackendResponse execute() {
if (null == backendConnection.getLogicSchema()) {
if (null == backendConnection.getSchema()) {
return new ErrorResponse(new NoDatabaseSelectedException());
}
databaseCommunicationEngine = databaseCommunicationEngineFactory.newTextProtocolInstance(backendConnection.getLogicSchema(), sql, backendConnection);
databaseCommunicationEngine = databaseCommunicationEngineFactory.newTextProtocolInstance(backendConnection.getSchema(), sql, backendConnection);
return databaseCommunicationEngine.execute();
}
......
......@@ -55,7 +55,7 @@ public final class ShardingCTLExplainBackendHandler implements TextProtocolBacke
if (!explainStatement.isPresent()) {
return new ErrorResponse(new InvalidShardingCTLFormatException(sql));
}
StatementExecutorWrapper statementExecutorWrapper = new StatementExecutorWrapper(backendConnection.getLogicSchema());
StatementExecutorWrapper statementExecutorWrapper = new StatementExecutorWrapper(backendConnection.getSchema());
executionUnits = statementExecutorWrapper.route(explainStatement.get().getSql()).getExecutionUnits().iterator();
queryHeaders = new ArrayList<>(2);
queryHeaders.add(new QueryHeader("", "", "datasource_name", "", 255, Types.CHAR, 0, false, false, false, false));
......
......@@ -55,7 +55,7 @@ public final class HintShowTableStatusExecutor extends AbstractHintQueryExecutor
@Override
protected MergedResult createMergedResult() {
Map<String, HintShowTableStatusResult> results = new HashMap<>();
for (String each : backendConnection.getLogicSchema().getMetaData().getSchema().getConfiguredSchemaMetaData().getAllTableNames()) {
for (String each : backendConnection.getSchema().getMetaData().getSchema().getConfiguredSchemaMetaData().getAllTableNames()) {
if (HintManager.isDatabaseShardingOnly()) {
fillShardingValues(results, each, HintManager.getDatabaseShardingValues(), Collections.emptyList());
} else {
......
......@@ -30,7 +30,7 @@ import static org.mockito.Mockito.mock;
public final class MockShardingSphereSchemasUtil {
/**
* Set logic schemas for global registry.
* Set schemas for global registry.
*
* @param prefix prefix of schema
* @param size size of schemas
......
......@@ -59,11 +59,11 @@ public final class TextProtocolBackendHandlerFactoryTest {
@Before
public void setUp() {
when(backendConnection.getTransactionType()).thenReturn(TransactionType.LOCAL);
ShardingSphereSchema logicSchema = mock(ShardingSphereSchema.class);
ShardingSphereSchema schema = mock(ShardingSphereSchema.class);
JDBCBackendDataSource backendDataSource = mock(JDBCBackendDataSource.class);
when(backendDataSource.getShardingTransactionManagerEngine()).thenReturn(mock(ShardingTransactionManagerEngine.class));
when(logicSchema.getBackendDataSource()).thenReturn(backendDataSource);
when(backendConnection.getLogicSchema()).thenReturn(logicSchema);
when(schema.getBackendDataSource()).thenReturn(backendDataSource);
when(backendConnection.getSchema()).thenReturn(schema);
}
@Test
......
......@@ -64,7 +64,7 @@ public final class BackendConnectionTest {
public void setUp() {
MockShardingSphereSchemasUtil.setSchemas("schema", 2);
backendConnection.setCurrentSchema("schema_0");
when(backendConnection.getLogicSchema().getBackendDataSource()).thenReturn(backendDataSource);
when(backendConnection.getSchema().getBackendDataSource()).thenReturn(backendDataSource);
when(backendDataSource.getShardingTransactionManagerEngine()).thenReturn(mock(ShardingTransactionManagerEngine.class));
}
......@@ -226,7 +226,7 @@ public final class BackendConnectionTest {
}
@Test(expected = ShardingSphereException.class)
public void assertFailedSwitchLogicSchemaWhileBegin() {
public void assertFailedSwitchSchemaWhileBegin() {
BackendTransactionManager transactionManager = new BackendTransactionManager(backendConnection);
transactionManager.begin();
backendConnection.setCurrentSchema("newSchema");
......
......@@ -41,7 +41,7 @@ import static org.mockito.Mockito.when;
public final class BackendTransactionManagerTest {
@Mock
private ShardingSphereSchema logicSchema;
private ShardingSphereSchema schema;
@Mock
private BackendConnection backendConnection;
......@@ -63,8 +63,8 @@ public final class BackendTransactionManagerTest {
when(shardingTransactionManagerEngine.getTransactionManager(TransactionType.XA)).thenReturn(shardingTransactionManager);
JDBCBackendDataSource backendDataSource = mock(JDBCBackendDataSource.class);
when(backendDataSource.getShardingTransactionManagerEngine()).thenReturn(shardingTransactionManagerEngine);
when(logicSchema.getBackendDataSource()).thenReturn(backendDataSource);
when(backendConnection.getLogicSchema()).thenReturn(logicSchema);
when(schema.getBackendDataSource()).thenReturn(backendDataSource);
when(backendConnection.getSchema()).thenReturn(schema);
when(backendConnection.getStateHandler()).thenReturn(stateHandler);
}
......
......@@ -18,7 +18,7 @@
package org.apache.shardingsphere.shardingproxy.backend.response.query;
import org.apache.shardingsphere.sharding.core.rule.ShardingRule;
import org.apache.shardingsphere.shardingproxy.backend.schema.impl.ShardingSphereSchema;
import org.apache.shardingsphere.shardingproxy.backend.schema.ShardingSphereSchema;
import org.apache.shardingsphere.sql.parser.binder.metadata.column.ColumnMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.index.IndexMetaData;
import org.apache.shardingsphere.sql.parser.binder.metadata.schema.SchemaMetaData;
......@@ -49,83 +49,83 @@ public final class QueryHeaderTest {
@Test
public void assertQueryHeaderSchema() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertThat(header.getSchema(), is("sharding_schema"));
}
@Test
public void assertQueryHeaderTable() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertThat(header.getTable(), is("t_logic_order"));
}
@Test
public void assertQueryHeaderColumnLabel() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertThat(header.getColumnLabel(), is("order_id"));
}
@Test
public void assertQueryHeaderColumnNameWithoutProjectionsContext() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertThat(header.getColumnName(), is("order_id"));
}
@Test
public void assertQueryHeaderColumnNameFromProjectionsContext() throws Exception {
QueryHeader header = new QueryHeader(createProjectionsContext(), createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createProjectionsContext(), createResultSetMetaData(), getShardingSphereSchema(), 1);
assertThat(header.getColumnName(), is("order_id"));
}
@Test
public void assertQueryHeaderColumnNameFromMetaData() throws Exception {
QueryHeader header = new QueryHeader(createProjectionsContext(), createResultSetMetaData(), getShardingSchema(), 2);
QueryHeader header = new QueryHeader(createProjectionsContext(), createResultSetMetaData(), getShardingSphereSchema(), 2);
assertThat(header.getColumnName(), is("expr"));
}
@Test
public void assertQueryHeaderColumnLength() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertThat(header.getColumnLength(), is(1));
}
@Test
public void assertQueryHeaderColumnType() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertThat(header.getColumnType(), is(Types.INTEGER));
}
@Test
public void assertQueryHeaderDecimals() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertThat(header.getDecimals(), is(1));
}
@Test
public void assertQueryHeaderSigned() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertTrue(header.isSigned());
}
@Test
public void assertQueryHeaderPrimaryKey() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertTrue(header.isPrimaryKey());
}
@Test
public void assertQueryHeaderNotNull() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertTrue(header.isNotNull());
}
@Test
public void assertQueryHeaderAutoIncrement() throws Exception {
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSchema(), 1);
QueryHeader header = new QueryHeader(createResultSetMetaData(), getShardingSphereSchema(), 1);
assertTrue(header.isAutoIncrement());
}
private ShardingSphereSchema getShardingSchema() {
private ShardingSphereSchema getShardingSphereSchema() {
ShardingSphereSchema result = mock(ShardingSphereSchema.class);
ColumnMetaData columnMetaData = new ColumnMetaData("order_id", Types.INTEGER, "int", true, false, false);
SchemaMetaData schemaMetaData = mock(SchemaMetaData.class);
......
......@@ -160,10 +160,10 @@ public final class ShardingCTLHintBackendHandlerTest {
@Test
public void assertShowTableStatus() throws SQLException {
clearThreadLocal();
ShardingSphereSchema logicSchema = mock(ShardingSphereSchema.class);
when(logicSchema.getMetaData()).thenReturn(
ShardingSphereSchema schema = mock(ShardingSphereSchema.class);
when(schema.getMetaData()).thenReturn(
new ShardingSphereMetaData(mock(DataSourceMetas.class), new RuleSchemaMetaData(new SchemaMetaData(ImmutableMap.of("user", mock(TableMetaData.class))), Collections.emptyMap())));
when(backendConnection.getLogicSchema()).thenReturn(logicSchema);
when(backendConnection.getSchema()).thenReturn(schema);
String sql = "sctl:hint show table status";
ShardingCTLHintBackendHandler defaultShardingCTLHintBackendHandler = new ShardingCTLHintBackendHandler(sql, backendConnection);
BackendResponse backendResponse = defaultShardingCTLHintBackendHandler.execute();
......
......@@ -68,7 +68,7 @@ public final class MySQLComStmtExecuteExecutor implements QueryCommandExecutor {
public MySQLComStmtExecuteExecutor(final MySQLComStmtExecutePacket comStmtExecutePacket, final BackendConnection backendConnection) {
databaseCommunicationEngine = DatabaseCommunicationEngineFactory.getInstance().newBinaryProtocolInstance(
backendConnection.getLogicSchema(), comStmtExecutePacket.getSql(), comStmtExecutePacket.getParameters(), backendConnection);
backendConnection.getSchema(), comStmtExecutePacket.getSql(), comStmtExecutePacket.getParameters(), backendConnection);
}
@Override
......
......@@ -44,11 +44,11 @@ public final class MySQLComStmtPrepareExecutor implements CommandExecutor {
private final MySQLComStmtPreparePacket packet;
private final ShardingSphereSchema logicSchema;
private final ShardingSphereSchema schema;
public MySQLComStmtPrepareExecutor(final MySQLComStmtPreparePacket packet, final BackendConnection backendConnection) {
this.packet = packet;
logicSchema = backendConnection.getLogicSchema();
schema = backendConnection.getSchema();
}
private int getColumnsCount(final SQLStatement sqlStatement) {
......@@ -59,7 +59,7 @@ public final class MySQLComStmtPrepareExecutor implements CommandExecutor {
public Collection<DatabasePacket> execute() {
Collection<DatabasePacket> result = new LinkedList<>();
int currentSequenceId = 0;
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(packet.getSql(), true);
SQLStatement sqlStatement = schema.getSqlParserEngine().parse(packet.getSql(), true);
if (!MySQLComStmtPrepareChecker.isStatementAllowed(sqlStatement)) {
result.add(new MySQLErrPacket(++currentSequenceId, MySQLServerErrorCode.ER_UNSUPPORTED_PS));
return result;
......
......@@ -51,7 +51,7 @@ public final class MySQLComFieldListPacketExecutor implements CommandExecutor {
public MySQLComFieldListPacketExecutor(final MySQLComFieldListPacket packet, final BackendConnection backendConnection) {
this.packet = packet;
schemaName = backendConnection.getSchemaName();
databaseCommunicationEngine = DatabaseCommunicationEngineFactory.getInstance().newTextProtocolInstance(backendConnection.getLogicSchema(), getShowColumnsSQL(), backendConnection);
databaseCommunicationEngine = DatabaseCommunicationEngineFactory.getInstance().newTextProtocolInstance(backendConnection.getSchema(), getShowColumnsSQL(), backendConnection);
}
@Override
......
......@@ -49,7 +49,7 @@ public final class MySQLCommandExecutorFactoryTest {
@Test
public void assertNewInstance() {
BackendConnection backendConnection = mock(BackendConnection.class);
when(backendConnection.getLogicSchema()).thenReturn(mock(ShardingSphereSchema.class));
when(backendConnection.getSchema()).thenReturn(mock(ShardingSphereSchema.class));
assertThat(MySQLCommandExecutorFactory.newInstance(MySQLCommandPacketType.COM_QUIT,
mock(CommandPacket.class), backendConnection), instanceOf(MySQLComQuitExecutor.class));
assertThat(MySQLCommandExecutorFactory.newInstance(MySQLCommandPacketType.COM_INIT_DB,
......
......@@ -53,9 +53,9 @@ public class MySQLComStmtExecuteExecutorTest {
@SneakyThrows
public void assertIsErrorResponse() {
BackendConnection backendConnection = mock(BackendConnection.class);
ShardingSphereSchema logicSchema = mock(ShardingSphereSchema.class);
when(logicSchema.getRules()).thenReturn(Collections.emptyList());
when(backendConnection.getLogicSchema()).thenReturn(logicSchema);
ShardingSphereSchema schema = mock(ShardingSphereSchema.class);
when(schema.getRules()).thenReturn(Collections.emptyList());
when(backendConnection.getSchema()).thenReturn(schema);
MySQLComStmtExecuteExecutor mysqlComStmtExecuteExecutor = new MySQLComStmtExecuteExecutor(mock(MySQLComStmtExecutePacket.class), backendConnection);
FieldSetter.setField(mysqlComStmtExecuteExecutor, MySQLComStmtExecuteExecutor.class.getDeclaredField("databaseCommunicationEngine"), databaseCommunicationEngine);
when(sqlException.getCause()).thenReturn(new Exception());
......@@ -68,9 +68,9 @@ public class MySQLComStmtExecuteExecutorTest {
@SneakyThrows
public void assertIsUpdateResponse() {
BackendConnection backendConnection = mock(BackendConnection.class);
ShardingSphereSchema logicSchema = mock(ShardingSphereSchema.class);
when(logicSchema.getRules()).thenReturn(Collections.emptyList());
when(backendConnection.getLogicSchema()).thenReturn(logicSchema);
ShardingSphereSchema schema = mock(ShardingSphereSchema.class);
when(schema.getRules()).thenReturn(Collections.emptyList());
when(backendConnection.getSchema()).thenReturn(schema);
MySQLComStmtExecuteExecutor mysqlComStmtExecuteExecutor = new MySQLComStmtExecuteExecutor(mock(MySQLComStmtExecutePacket.class), backendConnection);
FieldSetter.setField(mysqlComStmtExecuteExecutor, MySQLComStmtExecuteExecutor.class.getDeclaredField("databaseCommunicationEngine"), databaseCommunicationEngine);
when(databaseCommunicationEngine.execute()).thenReturn(new UpdateResponse());
......@@ -82,9 +82,9 @@ public class MySQLComStmtExecuteExecutorTest {
@SneakyThrows
public void assertIsQuery() {
BackendConnection backendConnection = mock(BackendConnection.class);
ShardingSphereSchema logicSchema = mock(ShardingSphereSchema.class);
when(logicSchema.getRules()).thenReturn(Collections.emptyList());
when(backendConnection.getLogicSchema()).thenReturn(logicSchema);
ShardingSphereSchema schema = mock(ShardingSphereSchema.class);
when(schema.getRules()).thenReturn(Collections.emptyList());
when(backendConnection.getSchema()).thenReturn(schema);
MySQLComStmtExecuteExecutor mysqlComStmtExecuteExecutor = new MySQLComStmtExecuteExecutor(mock(MySQLComStmtExecutePacket.class), backendConnection);
FieldSetter.setField(mysqlComStmtExecuteExecutor, MySQLComStmtExecuteExecutor.class.getDeclaredField("databaseCommunicationEngine"), databaseCommunicationEngine);
when(databaseCommunicationEngine.execute()).thenReturn(new QueryResponse(Collections.singletonList(mock(QueryHeader.class))));
......
......@@ -70,7 +70,7 @@ public final class PostgreSQLComBindExecutor implements QueryCommandExecutor {
public PostgreSQLComBindExecutor(final PostgreSQLComBindPacket packet, final BackendConnection backendConnection) {
this.packet = packet;
databaseCommunicationEngine = null == packet.getSql()
? null : DatabaseCommunicationEngineFactory.getInstance().newBinaryProtocolInstance(backendConnection.getLogicSchema(), packet.getSql(), packet.getParameters(), backendConnection);
? null : DatabaseCommunicationEngineFactory.getInstance().newBinaryProtocolInstance(backendConnection.getSchema(), packet.getSql(), packet.getParameters(), backendConnection);
}
@Override
......
......@@ -37,20 +37,20 @@ public final class PostgreSQLComParseExecutor implements CommandExecutor {
private final PostgreSQLComParsePacket packet;
private final ShardingSphereSchema logicSchema;
private final ShardingSphereSchema schema;
private final ConnectionScopeBinaryStatementRegistry binaryStatementRegistry;
public PostgreSQLComParseExecutor(final PostgreSQLComParsePacket packet, final BackendConnection backendConnection) {
this.packet = packet;
logicSchema = backendConnection.getLogicSchema();
schema = backendConnection.getSchema();
binaryStatementRegistry = BinaryStatementRegistry.getInstance().get(backendConnection.getConnectionId());
}
@Override
public Collection<DatabasePacket> execute() {
if (!packet.getSql().isEmpty()) {
SQLStatement sqlStatement = logicSchema.getSqlParserEngine().parse(packet.getSql(), true);
SQLStatement sqlStatement = schema.getSqlParserEngine().parse(packet.getSql(), true);
binaryStatementRegistry.register(packet.getStatementId(), packet.getSql(), sqlStatement.getParameterCount(), packet.getBinaryStatementParameterTypes());
}
return Collections.singletonList(new PostgreSQLParseCompletePacket());
......
......@@ -27,11 +27,7 @@ import lombok.NoArgsConstructor;
public final class DefaultSchema {
/**
* Logic schema name.
*
* <p>
* ShardingSphere just have one logic database table.
* </p>
* Schema name.
*/
public static final String LOGIC_NAME = "logic_db";
}