Skip to content

Commit

Permalink
apache#7318, support disconnected data source (apache#8837)
Browse files Browse the repository at this point in the history
* apache#7318, support disconnected data source

* apache#7318, support disconnected data source
  • Loading branch information
tuohai666 authored Dec 31, 2020
1 parent 6679b9a commit 0cede36
Show file tree
Hide file tree
Showing 6 changed files with 35 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,17 @@

import lombok.Setter;
import org.apache.shardingsphere.ha.api.config.HARuleConfiguration;
import org.apache.shardingsphere.ha.api.config.rule.HADataSourceRuleConfiguration;
import org.apache.shardingsphere.ha.constant.HAOrder;
import org.apache.shardingsphere.ha.rule.HARule;
import org.apache.shardingsphere.infra.database.type.DatabaseType;
import org.apache.shardingsphere.infra.rule.builder.ShardingSphereRuleBuilder;
import org.apache.shardingsphere.infra.rule.builder.aware.ResourceAware;

import javax.sql.DataSource;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
* HA rule builder.
Expand All @@ -42,6 +45,12 @@ public final class HARuleBuilder implements ShardingSphereRuleBuilder<HARule, HA

@Override
public HARule build(final HARuleConfiguration ruleConfig) {
    // Collect every data source name the HA rule actually references (primary + replicas).
    Set<String> usedDataSourceNames = new HashSet<>();
    for (HADataSourceRuleConfiguration each : ruleConfig.getDataSources()) {
        usedDataSourceNames.add(each.getPrimaryDataSourceName());
        usedDataSourceNames.addAll(each.getReplicaDataSourceNames());
    }
    // Drop unreferenced (e.g. disconnected) data sources. keySet() is a live view,
    // so retainAll removes the corresponding entries from dataSourceMap itself —
    // equivalent to the entrySet().removeIf(...) form, without the magic-sized HashSet.
    dataSourceMap.keySet().retainAll(usedDataSourceNames);
    return new HARule(ruleConfig, databaseType, dataSourceMap, schemaName);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,18 @@

package org.apache.shardingsphere.ha.rule.biulder;

import org.apache.shardingsphere.infra.rule.builder.ShardingSphereRuleBuilder;
import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader;
import org.apache.shardingsphere.infra.spi.ordered.OrderedSPIRegistry;
import org.apache.shardingsphere.ha.api.config.HARuleConfiguration;
import org.apache.shardingsphere.ha.api.config.rule.HADataSourceRuleConfiguration;
import org.apache.shardingsphere.ha.rule.HARule;
import org.apache.shardingsphere.infra.rule.builder.ShardingSphereRuleBuilder;
import org.apache.shardingsphere.infra.spi.ShardingSphereServiceLoader;
import org.apache.shardingsphere.infra.spi.ordered.OrderedSPIRegistry;
import org.junit.Test;

import javax.sql.DataSource;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertThat;
Expand All @@ -46,6 +49,9 @@ public void assertBuild() {
"name", "primaryDataSourceName", Collections.singletonList("name"), "loadBalancerName", true);
when(ruleConfig.getDataSources()).thenReturn(Collections.singletonList(dataSourceRuleConfig));
ShardingSphereRuleBuilder builder = OrderedSPIRegistry.getRegisteredServices(Collections.singletonList(ruleConfig), ShardingSphereRuleBuilder.class).get(ruleConfig);
Map<String, DataSource> dataSourceMap = new HashMap<>();
dataSourceMap.put("primaryDataSourceName", mock(DataSource.class));
((HARuleBuilder) builder).setDataSourceMap(dataSourceMap);
assertThat(builder.build(ruleConfig), instanceOf(HARule.class));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -163,15 +163,12 @@ public synchronized void renew(final SchemaPersistEvent event) {
*/
// Handles a primary-data-source switch for a schema.
// NOTE(review): this span is reconstructed from a stripped diff — the lines touching
// dataSourceConfigurations below were the pre-change code being removed by this commit;
// verify against the repository before relying on this exact line set.
@Subscribe
public synchronized void renew(final PrimaryDataSourceUpdateEvent event) {
// Old behavior (removed in this commit): also dropped the old primary from the
// persisted data source configurations.
Map<String, DataSourceConfiguration> dataSourceConfigurations = loadDataSourceConfigurations(event.getSchemaName());
dataSourceConfigurations.remove(event.getOldPrimaryDataSource());
// Load all rule configurations for the schema and update only the HA ones.
Collection<RuleConfiguration> ruleConfigurations = loadRuleConfigurations(event.getSchemaName());
for (RuleConfiguration each : ruleConfigurations) {
if (each instanceof HARuleConfiguration) {
updateHaDataSourceRuleConfigurations(event, (HARuleConfiguration) each);
}
}
// Old behavior (removed in this commit): re-persisted the pruned data source configurations.
persistDataSourceConfigurations(event.getSchemaName(), dataSourceConfigurations);
// Persist the updated rule configurations back to the registry.
persistRuleConfigurations(event.getSchemaName(), ruleConfigurations);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,8 @@ private DatabaseType getDatabaseType(final Map<String, DataSource> dataSourceMap
// Detects the database type from a live connection's JDBC URL.
// Returns null when the data source cannot provide a connection — this swallow is
// deliberate (this commit's "support disconnected data source" feature), so callers
// MUST handle a null result. NOTE(review): the declared `throws SQLException` is now
// unreachable, since the only SQLException source is caught below; kept for
// interface compatibility.
private DatabaseType getDatabaseType(final DataSource dataSource) throws SQLException {
try (Connection connection = dataSource.getConnection()) {
return DatabaseTypeRegistry.getDatabaseTypeByURL(connection.getMetaData().getURL());
} catch (final SQLException ex) {
// Disconnected data source: signal "unknown type" instead of failing startup.
return null;
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import com.zaxxer.hikari.HikariDataSource;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.shardingsphere.infra.config.datasource.JDBCParameterDecorator;
import org.apache.shardingsphere.infra.config.datasource.DataSourceParameter;
import org.apache.shardingsphere.infra.exception.ShardingSphereException;
Expand All @@ -34,6 +35,7 @@
* Backend data source factory using {@code HikariDataSource} for JDBC raw.
*/
@NoArgsConstructor(access = AccessLevel.PRIVATE)
@Slf4j
public final class JDBCRawBackendDataSourceFactory implements JDBCBackendDataSourceFactory {

private static final JDBCRawBackendDataSourceFactory INSTANCE = new JDBCRawBackendDataSourceFactory();
Expand Down Expand Up @@ -67,7 +69,15 @@ public DataSource build(final String dataSourceName, final DataSourceParameter d
config.setMaximumPoolSize(dataSourceParameter.getMaxPoolSize());
config.setMinimumIdle(dataSourceParameter.getMinPoolSize());
config.setReadOnly(dataSourceParameter.isReadOnly());
DataSource result = new HikariDataSource(config);
DataSource result;
try {
result = new HikariDataSource(config);
// CHECKSTYLE:OFF
} catch (final Exception ex) {
// CHECKSTYLE:ON
log.error("Exception occur: ", ex);
return null;
}
Optional<JDBCParameterDecorator> decorator = findJDBCParameterDecorator(result);
return decorator.isPresent() ? decorator.get().decorate(result) : result;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,10 @@ private static Map<String, Map<String, DataSource>> createDataSourcesMap(final M
/**
 * Builds a data source per parameter entry, silently skipping any that fail to build
 * (the factory returns null for unreachable/disconnected data sources).
 * Insertion order of the input map is preserved.
 */
private static Map<String, DataSource> createDataSources(final Map<String, DataSourceParameter> dataSourceParameters) {
    Map<String, DataSource> result = new LinkedHashMap<>(dataSourceParameters.size(), 1);
    dataSourceParameters.forEach((name, parameter) -> {
        DataSource built = JDBCRawBackendDataSourceFactory.getInstance().build(name, parameter);
        if (null != built) {
            result.put(name, built);
        }
    });
    return result;
}
Expand Down

0 comments on commit 0cede36

Please sign in to comment.