Here are examples of the Java API org.springframework.jdbc.core.JdbcOperations, taken from open-source projects. The examples are ranked by community votes indicating which are most useful and appropriate.
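Before the project examples, here is a minimal sketch of typical JdbcOperations usage. The DataSource wiring and the users table below are illustrative assumptions, not taken from any of the listed projects.
import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.JdbcTemplate;

public class JdbcOperationsSketch {
    // JdbcTemplate is the standard implementation of the JdbcOperations interface.
    private final JdbcOperations jdbc;

    public JdbcOperationsSketch(DataSource dataSource) {
        this.jdbc = new JdbcTemplate(dataSource);
    }

    public int countUsers() {
        // Single-value query; the second argument is the expected result type.
        return jdbc.queryForObject("select count(*) from users", Integer.class);
    }

    public int renameUser(long id, String name) {
        // Parameterized update; returns the number of affected rows.
        return jdbc.update("update users set name = ? where id = ?", name, id);
    }
}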
59 Examples
View Source File : DbLogStorage.java
License : MIT License
Project Creator : yuantiku
/**
* @author pw
*/
public class DbLogStorage {
private final JdbcOperations db;
public DbLogStorage(JdbcOperations db) {
this.db = db;
}
public void create(String resource, String action, String className, String methodName) {
db.update("insert into `log` (resource, `action`, className, methodName, createdTime) values (?, ?, ?, ?, ?)", resource, action, className, methodName, System.currentTimeMillis());
}
}
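A short usage note for the example above; the DataSource and the argument values are illustrative assumptions:
// Hypothetical wiring; dataSource is assumed to be configured elsewhere.
DbLogStorage logStorage = new DbLogStorage(new JdbcTemplate(dataSource));
// Writes one audit row; create() fills in createdTime itself via System.currentTimeMillis().
logStorage.create("user", "login", "LoginController", "login");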
View Source File : DataSourceInitializerInvokerTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
@Test
public void dataSourceInitializedWithMultipleScripts() {
this.contextRunner.withPropertyValues("spring.datasource.initialization-mode:always", "spring.datasource.schema:" + getRelativeLocationFor("schema.sql") + "," + getRelativeLocationFor("another.sql"), "spring.datasource.data:" + getRelativeLocationFor("data.sql")).run((context) -> {
DataSource dataSource = context.getBean(DataSource.class);
assertThat(dataSource).isInstanceOf(HikariDataSource.class);
assertThat(dataSource).isNotNull();
JdbcOperations template = new JdbcTemplate(dataSource);
assertThat(template.queryForObject("SELECT COUNT(*) from FOO", Integer.class)).isEqualTo(1);
assertThat(template.queryForObject("SELECT COUNT(*) from SPAM", Integer.class)).isEqualTo(0);
});
}
View Source File : DataSourceInitializerInvokerTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
@Test
public void multipleScriptsAppliedInLexicalOrder() {
new ApplicationContextRunner(() -> {
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext();
context.setResourceLoader(new ReverseOrderResourceLoader(new DefaultResourceLoader()));
return context;
}).withConfiguration(AutoConfigurations.of(DataSourceAutoConfiguration.class)).withPropertyValues("spring.datasource.initialization-mode=always", "spring.datasource.url:jdbc:hsqldb:mem:testdb-" + new Random().nextInt(), "spring.datasource.schema:" + getRelativeLocationFor("lexical-schema-*.sql"), "spring.datasource.data:" + getRelativeLocationFor("data.sql")).run((context) -> {
DataSource dataSource = context.getBean(DataSource.class);
assertThat(dataSource).isInstanceOf(HikariDataSource.class);
assertThat(dataSource).isNotNull();
JdbcOperations template = new JdbcTemplate(dataSource);
assertThat(template.queryForObject("SELECT COUNT(*) from FOO", Integer.class)).isEqualTo(1);
});
}
View Source File : DataSourceInitializerInvokerTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
@Test
public void dataSourceInitializedWithExplicitScript() {
this.contextRunner.withPropertyValues("spring.datasource.initialization-mode:always", "spring.datasource.schema:" + getRelativeLocationFor("schema.sql"), "spring.datasource.data:" + getRelativeLocationFor("data.sql")).run((context) -> {
DataSource dataSource = context.getBean(DataSource.class);
assertThat(dataSource).isInstanceOf(HikariDataSource.class);
assertThat(dataSource).isNotNull();
JdbcOperations template = new JdbcTemplate(dataSource);
assertThat(template.queryForObject("SELECT COUNT(*) from FOO", Integer.class)).isEqualTo(1);
});
}
View Source File : DataSourceInitializerInvokerTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
@Test
public void dataSourceInitializedWithExplicitSqlScriptEncoding() {
this.contextRunner.withPropertyValues("spring.datasource.initialization-mode:always", "spring.datasource.sqlScriptEncoding:UTF-8", "spring.datasource.schema:" + getRelativeLocationFor("encoding-schema.sql"), "spring.datasource.data:" + getRelativeLocationFor("encoding-data.sql")).run((context) -> {
DataSource dataSource = context.getBean(DataSource.class);
assertThat(dataSource).isInstanceOf(HikariDataSource.class);
assertThat(dataSource).isNotNull();
JdbcOperations template = new JdbcTemplate(dataSource);
assertThat(template.queryForObject("SELECT COUNT(*) from BAR", Integer.class)).isEqualTo(2);
assertThat(template.queryForObject("SELECT name from BAR WHERE id=1", String.class)).isEqualTo("bar");
assertThat(template.queryForObject("SELECT name from BAR WHERE id=2", String.class)).isEqualTo("ばー");
});
}
View Source File : DataSourceInitializerInvokerTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
private void assertDataSourceIsInitialized(DataSource dataSource) {
JdbcOperations template = new JdbcTemplate(dataSource);
assertThat(template.queryForObject("SELECT COUNT(*) from BAR", Integer.class)).isEqualTo(1);
}
View Source File : IntegrationAutoConfigurationTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
@Test
public void integrationJdbcDataSourceInitializerEnabledByDefaultWithEmbeddedDb() {
this.contextRunner.withUserConfiguration(EmbeddedDataSourceConfiguration.class).withConfiguration(AutoConfigurations.of(DataSourceTransactionManagerAutoConfiguration.class, JdbcTemplateAutoConfiguration.class, IntegrationAutoConfiguration.class)).withPropertyValues("spring.datasource.generate-unique-name=true").run((context) -> {
IntegrationProperties properties = context.getBean(IntegrationProperties.class);
assertThat(properties.getJdbc().getInitializeSchema()).isEqualTo(DataSourceInitializationMode.EMBEDDED);
JdbcOperations jdbc = context.getBean(JdbcOperations.class);
assertThat(jdbc.queryForList("select * from INT_MESSAGE")).isEmpty();
});
}
View Source File : IntegrationAutoConfigurationTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
@Test
public void integrationJdbcDataSourceInitializerDisabled() {
this.contextRunner.withUserConfiguration(EmbeddedDataSourceConfiguration.class).withConfiguration(AutoConfigurations.of(DataSourceTransactionManagerAutoConfiguration.class, JdbcTemplateAutoConfiguration.class, IntegrationAutoConfiguration.class)).withPropertyValues("spring.datasource.generate-unique-name=true", "spring.integration.jdbc.initialize-schema=never").run((context) -> {
IntegrationProperties properties = context.getBean(IntegrationProperties.class);
assertThat(properties.getJdbc().getInitializeSchema()).isEqualTo(DataSourceInitializationMode.NEVER);
JdbcOperations jdbc = context.getBean(JdbcOperations.class);
assertThatExceptionOfType(BadSqlGrammarException.class).isThrownBy(() -> jdbc.queryForList("select * from INT_MESSAGE"));
});
}
View Source File : IntegrationAutoConfigurationTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
@Test
public void integrationJdbcDataSourceInitializerEnabled() {
this.contextRunner.withUserConfiguration(EmbeddedDataSourceConfiguration.class).withConfiguration(AutoConfigurations.of(DataSourceTransactionManagerAutoConfiguration.class, JdbcTemplateAutoConfiguration.class, IntegrationAutoConfiguration.class)).withPropertyValues("spring.datasource.generate-unique-name=true", "spring.integration.jdbc.initialize-schema=always").run((context) -> {
IntegrationProperties properties = context.getBean(IntegrationProperties.class);
assertThat(properties.getJdbc().getInitializeSchema()).isEqualTo(DataSourceInitializationMode.ALWAYS);
JdbcOperations jdbc = context.getBean(JdbcOperations.class);
assertThat(jdbc.queryForList("select * from INT_MESSAGE")).isEmpty();
assertThat(jdbc.queryForList("select * from INT_GROUP_TO_MESSAGE")).isEmpty();
assertThat(jdbc.queryForList("select * from INT_MESSAGE_GROUP")).isEmpty();
assertThat(jdbc.queryForList("select * from INT_LOCK")).isEmpty();
assertThat(jdbc.queryForList("select * from INT_CHANNEL_MESSAGE")).isEmpty();
});
}
View Source File : Oracle11QueuePickTaskDao.java
License : MIT License
Project Creator : yoomoney-tech
/**
* Database access object to pick tasks in the queue for Oracle database type.
*
* @author Oleg Kandaurov
* @since 15.05.2020
*/
@SuppressFBWarnings({ "UCPM_USE_CHARACTER_PARAMETERIZED_METHOD", "ISB_INEFFICIENT_STRING_BUFFERING" })
public class Oracle11QueuePickTaskDao implements QueuePickTaskDao {
private final Map<QueueLocation, String> pickTaskSqlCache = new ConcurrentHashMap<>();
@Nonnull
private final JdbcOperations jdbcTemplate;
@Nonnull
private final QueueTableSchema queueTableSchema;
@Nonnull
private final PickTaskSettings pickTaskSettings;
public Oracle11QueuePickTaskDao(@Nonnull JdbcOperations jdbcTemplate, @Nonnull QueueTableSchema queueTableSchema, @Nonnull PickTaskSettings pickTaskSettings) {
this.jdbcTemplate = Objects.requireNonNull(jdbcTemplate);
this.queueTableSchema = Objects.requireNonNull(queueTableSchema);
this.pickTaskSettings = Objects.requireNonNull(pickTaskSettings);
}
@Nullable
@Override
@SuppressFBWarnings("SQL_INJECTION_SPRING_JDBC")
public TaskRecord pickTask(@Nonnull QueueLocation location) {
PickTaskCallableStatement pickTaskStatement = new PickTaskCallableStatement(queueTableSchema, location, pickTaskSettings);
return jdbcTemplate.execute(pickTaskSqlCache.computeIfAbsent(location, this::createPickTaskSql), pickTaskStatement);
}
private static class PickTaskCallableStatement implements CallableStatementCallback<TaskRecord> {
private final QueueLocation queueLocation;
private final PickTaskSettings pickTaskSettings;
private final QueueTableSchema queueTableSchema;
public PickTaskCallableStatement(QueueTableSchema queueTableSchema, QueueLocation queueLocation, PickTaskSettings pickTaskSettings) {
this.queueLocation = queueLocation;
this.pickTaskSettings = pickTaskSettings;
this.queueTableSchema = queueTableSchema;
}
@Override
public TaskRecord doInCallableStatement(CallableStatement cs) throws SQLException, DataAccessException {
int inputIndex = 1;
cs.setString(inputIndex++, queueLocation.getQueueId().asString());
cs.setLong(inputIndex++, pickTaskSettings.getRetryInterval().getSeconds());
cs.registerOutParameter(inputIndex++, java.sql.Types.BIGINT);
cs.registerOutParameter(inputIndex++, java.sql.Types.CLOB);
cs.registerOutParameter(inputIndex++, java.sql.Types.BIGINT);
cs.registerOutParameter(inputIndex++, java.sql.Types.BIGINT);
cs.registerOutParameter(inputIndex++, java.sql.Types.BIGINT);
cs.registerOutParameter(inputIndex++, java.sql.Types.TIMESTAMP);
cs.registerOutParameter(inputIndex++, java.sql.Types.TIMESTAMP);
for (String ignored : queueTableSchema.getExtFields()) {
cs.registerOutParameter(inputIndex++, Types.VARCHAR);
}
cs.execute();
int resultIndex = 3;
long id = cs.getLong(resultIndex++);
if (id == 0L) {
return null;
}
TaskRecord.Builder builder = TaskRecord.builder().withId(id).withPayload(cs.getString(resultIndex++)).withAttemptsCount(cs.getLong(resultIndex++)).withReenqueueAttemptsCount(cs.getLong(resultIndex++)).withTotalAttemptsCount(cs.getLong(resultIndex++)).withCreatedAt(getZonedDateTime(cs.getTimestamp(resultIndex++))).withNextProcessAt(getZonedDateTime(cs.getTimestamp(resultIndex++)));
Map<String, String> extData = new HashMap<>(queueTableSchema.getExtFields().size());
for (String field : queueTableSchema.getExtFields()) {
extData.put(field, cs.getString(resultIndex++));
}
return builder.withExtData(extData).build();
}
private ZonedDateTime getZonedDateTime(Timestamp timestamp) {
return ZonedDateTime.ofInstant(timestamp.toInstant(), ZoneId.systemDefault());
}
}
@Nonnull
private String getNextProcessTimeSql(@Nonnull TaskRetryType taskRetryType) {
Objects.requireNonNull(taskRetryType);
switch(taskRetryType) {
case GEOMETRIC_BACKOFF:
return "CURRENT_TIMESTAMP + power(2, " + "rattempt) * ? * (INTERVAL '1' SECOND)";
case ARITHMETIC_BACKOFF:
return "CURRENT_TIMESTAMP + (1 + (" + "rattempt * 2)) * ? * (INTERVAL '1' SECOND)";
case LINEAR_BACKOFF:
return "CURRENT_TIMESTAMP + ? * (INTERVAL '1' SECOND)";
default:
throw new IllegalStateException("unknown retry type: " + taskRetryType);
}
}
private String createPickTaskSql(QueueLocation queueLocation) {
StringBuilder declaration = new StringBuilder("DECLARE\n" + " rid " + queueLocation.getTableName() + "." + queueTableSchema.getIdField() + "%TYPE;\n" + " rpayload " + queueLocation.getTableName() + "." + queueTableSchema.getPayloadField() + "%TYPE;\n" + " rattempt " + queueLocation.getTableName() + "." + queueTableSchema.getAttemptField() + "%TYPE;\n" + " rreenqueue_attempt " + queueLocation.getTableName() + "." + queueTableSchema.getReenqueueAttemptField() + "%TYPE;\n" + " rtotal_attempt " + queueLocation.getTableName() + "." + queueTableSchema.getTotalAttemptField() + "%TYPE;\n" + " rcreated_at " + queueLocation.getTableName() + "." + queueTableSchema.getCreatedAtField() + "%TYPE;\n" + " rnext_process_at " + queueLocation.getTableName() + "." + queueTableSchema.getNextProcessAtField() + "%TYPE;\n");
queueTableSchema.getExtFields().forEach(field -> declaration.append("r").append(field).append(" ").append(queueLocation.getTableName()).append(".").append(field).append("%TYPE;\n"));
StringBuilder cursorSelect = new StringBuilder(" CURSOR c IS SELECT " + queueTableSchema.getIdField() + ", " + queueTableSchema.getPayloadField() + ", " + queueTableSchema.getAttemptField() + ", " + queueTableSchema.getReenqueueAttemptField() + ", " + queueTableSchema.getTotalAttemptField() + ", " + queueTableSchema.getCreatedAtField() + ", ");
queueTableSchema.getExtFields().forEach(field -> cursorSelect.append(field).append(", "));
cursorSelect.append(queueTableSchema.getNextProcessAtField()).append(" ");
final String fetchCursor = " FROM " + queueLocation.getTableName() + " " + " WHERE " + queueTableSchema.getQueueNameField() + " = ? AND " + queueTableSchema.getNextProcessAtField() + " <= CURRENT_TIMESTAMP" + " FOR UPDATE SKIP LOCKED;" + " BEGIN \n" + " OPEN c; \n" + " FETCH c INTO ";
StringBuilder fetchParams = new StringBuilder("rid, " + "rpayload, " + "rattempt, " + "rreenqueue_attempt, " + "rtotal_attempt, " + "rcreated_at, ");
queueTableSchema.getExtFields().forEach(field -> fetchParams.append("r").append(field).append(", "));
fetchParams.append("rnext_process_at;\n");
String updateSql = "IF (c%NOTFOUND) THEN \n" + " rid := 0;\n " + " END IF\n;" + " CLOSE c;\n" + " IF (rid > 0) THEN \n" + " rnext_process_at := " + getNextProcessTimeSql(pickTaskSettings.getRetryType()) + ";\n" + " rattempt := rattempt + 1;\n" + " rtotal_attempt := rtotal_attempt + 1;\n" + " UPDATE " + queueLocation.getTableName() + " SET " + queueTableSchema.getNextProcessAtField() + " = rnext_process_at, " + queueTableSchema.getAttemptField() + " = rattempt, " + queueTableSchema.getTotalAttemptField() + " = rtotal_attempt WHERE " + queueTableSchema.getIdField() + " = rid; \n" + " END IF;";
StringBuilder returnParams = new StringBuilder("\n ? := rid; " + "\n ? := rpayload; " + "\n ? := rattempt; " + "\n ? := rreenqueue_attempt; " + "\n ? := rtotal_attempt; " + "\n ? := rcreated_at; " + "\n ? := rnext_process_at; ");
queueTableSchema.getExtFields().forEach(field -> returnParams.append("\n ? := r").append(field).append("; "));
returnParams.append("\n END; ");
return declaration.toString() + cursorSelect + fetchCursor + fetchParams + updateSql + returnParams;
}
}
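A brief usage note for the DAO above. The jdbcTemplate, queueTableSchema, pickTaskSettings and queueLocation values are assumed to come from the surrounding library configuration:
// All constructor arguments below are assumed to be configured elsewhere.
QueuePickTaskDao pickTaskDao = new Oracle11QueuePickTaskDao(jdbcTemplate, queueTableSchema, pickTaskSettings);
TaskRecord task = pickTaskDao.pickTask(queueLocation);
// pickTask(...) returns null when no task is available (the PL/SQL block above returns id = 0 in that case).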
View Source File : QueueShard.java
License : MIT License
Project Creator : yoomoney-tech
/**
* Properties for connection to a database shard.
*
* @author Oleg Kandaurov
* @since 13.08.2018
*/
public class QueueShard {
@Nonnull
private final DatabaseDialect databaseDialect;
@Nonnull
private final QueueShardId shardId;
@Nonnull
private final JdbcOperations jdbcTemplate;
@Nonnull
private final TransactionOperations transactionTemplate;
@Nonnull
private final QueueTableSchema queueTableSchema;
@Nonnull
private final QueueDao queueDao;
/**
* Constructor
*
* @param databaseDialect Database type (dialect)
* @param queueTableSchema Queue table scheme.
* @param shardId Shard identifier.
* @param jdbcTemplate Reference to Spring JDBC template.
* @param transactionTemplate Reference to Spring Transaction template.
*/
public QueueShard(@Nonnull DatabaseDialect databaseDialect, @Nonnull QueueTableSchema queueTableSchema, @Nonnull QueueShardId shardId, @Nonnull JdbcOperations jdbcTemplate, @Nonnull TransactionOperations transactionTemplate) {
this.databaseDialect = requireNonNull(databaseDialect);
this.shardId = requireNonNull(shardId);
this.jdbcTemplate = requireNonNull(jdbcTemplate);
this.transactionTemplate = requireNonNull(transactionTemplate);
this.queueTableSchema = requireNonNull(queueTableSchema);
this.queueDao = QueueDao.Factory.create(databaseDialect, jdbcTemplate, queueTableSchema);
}
/**
* Get shard identifier.
*
* @return Shard identifier.
*/
@Nonnull
public QueueShardId getShardId() {
return shardId;
}
/**
* Get reference to the Spring JDBC template for that shard.
*
* @return Reference to Spring JDBC template.
*/
@Nonnull
public JdbcOperations getJdbcTemplate() {
return jdbcTemplate;
}
/**
* Get reference to the Spring Transaction template for that shard.
*
* @return Reference to Spring Transaction template.
*/
@Nonnull
public TransactionOperations getTransactionTemplate() {
return transactionTemplate;
}
/**
* Get reference to database access object to work with queue storage on that shard.
*
* @return Reference to database access object to work with the queue.
*/
@Nonnull
public QueueDao getQueueDao() {
return queueDao;
}
/**
* Get database type for that shard.
*
* @return Database type.
*/
@Nonnull
public DatabaseDialect getDatabaseDialect() {
return databaseDialect;
}
/**
* Get queue table schema for that shard.
*
* @return Queue table schema.
*/
@Nonnull
public QueueTableSchema getQueueTableSchema() {
return queueTableSchema;
}
}
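A hedged wiring sketch for the constructor above, assuming an already configured DataSource; the dialect, table schema and shard identifier are taken as given, since their construction belongs to the surrounding library:
// databaseDialect, queueTableSchema and shardId are assumed to come from the library's own configuration API.
JdbcOperations jdbcTemplate = new JdbcTemplate(dataSource);
TransactionOperations transactionTemplate = new TransactionTemplate(new DataSourceTransactionManager(dataSource));
QueueShard shard = new QueueShard(databaseDialect, queueTableSchema, shardId, jdbcTemplate, transactionTemplate);
// The shard then exposes getJdbcTemplate(), getTransactionTemplate() and getQueueDao() for queue operations.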
View Source File : NamedParameterJdbcTemplate.java
License : MIT License
Project Creator : Vip-Augus
/**
* Template class with a basic set of JDBC operations, allowing the use
* of named parameters rather than traditional '?' placeholders.
*
* <p>This class delegates to a wrapped {@link #getJdbcOperations() JdbcTemplate}
* once the substitution from named parameters to JDBC style '?' placeholders is
* done at execution time. It also allows for expanding a {@link java.util.List}
* of values to the appropriate number of placeholders.
*
* <p>The underlying {@link org.springframework.jdbc.core.JdbcTemplate} is
* exposed to allow for convenient access to the traditional
* {@link org.springframework.jdbc.core.JdbcTemplate} methods.
*
* <p><b>NOTE: An instance of this class is thread-safe once configured.</b>
*
* @author Thomas Risberg
* @author Juergen Hoeller
* @since 2.0
* @see NamedParameterJdbcOperations
* @see org.springframework.jdbc.core.JdbcTemplate
*/
public class NamedParameterJdbcTemplate implements NamedParameterJdbcOperations {
/**
* Default maximum number of entries for this template's SQL cache: 256.
*/
public static final int DEFAULT_CACHE_LIMIT = 256;
/**
* The JdbcTemplate we are wrapping.
*/
private final JdbcOperations classicJdbcTemplate;
private volatile int cacheLimit = DEFAULT_CACHE_LIMIT;
/**
* Cache of original SQL String to ParsedSql representation.
*/
@SuppressWarnings("serial")
private final Map<String, ParsedSql> parsedSqlCache = new LinkedHashMap<String, ParsedSql>(DEFAULT_CACHE_LIMIT, 0.75f, true) {
@Override
protected boolean removeEldestEntry(Map.Entry<String, ParsedSql> eldest) {
return size() > getCacheLimit();
}
};
/**
* Create a new NamedParameterJdbcTemplate for the given {@link DataSource}.
* <p>Creates a classic Spring {@link org.springframework.jdbc.core.JdbcTemplate} and wraps it.
* @param dataSource the JDBC DataSource to access
*/
public NamedParameterJdbcTemplate(DataSource dataSource) {
Assert.notNull(dataSource, "DataSource must not be null");
this.classicJdbcTemplate = new JdbcTemplate(dataSource);
}
/**
* Create a new NamedParameterJdbcTemplate for the given classic
* Spring {@link org.springframework.jdbc.core.JdbcTemplate}.
* @param classicJdbcTemplate the classic Spring JdbcTemplate to wrap
*/
public NamedParameterJdbcTemplate(JdbcOperations classicJdbcTemplate) {
Assert.notNull(classicJdbcTemplate, "JdbcTemplate must not be null");
this.classicJdbcTemplate = classicJdbcTemplate;
}
/**
* Expose the classic Spring JdbcTemplate operations to allow invocation
* of less commonly used methods.
*/
@Override
public JdbcOperations getJdbcOperations() {
return this.classicJdbcTemplate;
}
/**
* Expose the classic Spring {@link JdbcTemplate} itself, if available,
* in particular for passing it on to other {@code JdbcTemplate} consumers.
* <p>If sufficient for the purposes at hand, {@link #getJdbcOperations()}
* is recommended over this variant.
* @since 5.0.3
*/
public JdbcTemplate getJdbcTemplate() {
Assert.state(this.classicJdbcTemplate instanceof JdbcTemplate, "No JdbcTemplate available");
return (JdbcTemplate) this.classicJdbcTemplate;
}
/**
* Specify the maximum number of entries for this template's SQL cache.
* Default is 256.
*/
public void setCacheLimit(int cacheLimit) {
this.cacheLimit = cacheLimit;
}
/**
* Return the maximum number of entries for this template's SQL cache.
*/
public int getCacheLimit() {
return this.cacheLimit;
}
@Override
@Nullable
public <T> T execute(String sql, SqlParameterSource paramSource, PreparedStatementCallback<T> action) throws DataAccessException {
return getJdbcOperations().execute(getPreparedStatementCreator(sql, paramSource), action);
}
@Override
@Nullable
public <T> T execute(String sql, Map<String, ?> paramMap, PreparedStatementCallback<T> action) throws DataAccessException {
return execute(sql, new MapSqlParameterSource(paramMap), action);
}
@Override
@Nullable
public <T> T execute(String sql, PreparedStatementCallback<T> action) throws DataAccessException {
return execute(sql, EmptySqlParameterSource.INSTANCE, action);
}
@Override
@Nullable
public <T> T query(String sql, SqlParameterSource paramSource, ResultSetExtractor<T> rse) throws DataAccessException {
return getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), rse);
}
@Override
@Nullable
public <T> T query(String sql, Map<String, ?> paramMap, ResultSetExtractor<T> rse) throws DataAccessException {
return query(sql, new MapSqlParameterSource(paramMap), rse);
}
@Override
@Nullable
public <T> T query(String sql, ResultSetExtractor<T> rse) throws DataAccessException {
return query(sql, EmptySqlParameterSource.INSTANCE, rse);
}
@Override
public void query(String sql, SqlParameterSource paramSource, RowCallbackHandler rch) throws DataAccessException {
getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), rch);
}
@Override
public void query(String sql, Map<String, ?> paramMap, RowCallbackHandler rch) throws DataAccessException {
query(sql, new MapSqlParameterSource(paramMap), rch);
}
@Override
public void query(String sql, RowCallbackHandler rch) throws DataAccessException {
query(sql, EmptySqlParameterSource.INSTANCE, rch);
}
@Override
public <T> List<T> query(String sql, SqlParameterSource paramSource, RowMapper<T> rowMapper) throws DataAccessException {
return getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), rowMapper);
}
@Override
public <T> List<T> query(String sql, Map<String, ?> paramMap, RowMapper<T> rowMapper) throws DataAccessException {
return query(sql, new MapSqlParameterSource(paramMap), rowMapper);
}
@Override
public <T> List<T> query(String sql, RowMapper<T> rowMapper) throws DataAccessException {
return query(sql, EmptySqlParameterSource.INSTANCE, rowMapper);
}
@Override
@Nullable
public <T> T queryForObject(String sql, SqlParameterSource paramSource, RowMapper<T> rowMapper) throws DataAccessException {
List<T> results = getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), rowMapper);
return DataAccessUtils.nullableSingleResult(results);
}
@Override
@Nullable
public <T> T queryForObject(String sql, Map<String, ?> paramMap, RowMapper<T> rowMapper) throws DataAccessException {
return queryForObject(sql, new MapSqlParameterSource(paramMap), rowMapper);
}
@Override
@Nullable
public <T> T queryForObject(String sql, SqlParameterSource paramSource, Class<T> requiredType) throws DataAccessException {
return queryForObject(sql, paramSource, new SingleColumnRowMapper<>(requiredType));
}
@Override
@Nullable
public <T> T queryForObject(String sql, Map<String, ?> paramMap, Class<T> requiredType) throws DataAccessException {
return queryForObject(sql, paramMap, new SingleColumnRowMapper<>(requiredType));
}
@Override
public Map<String, Object> queryForMap(String sql, SqlParameterSource paramSource) throws DataAccessException {
Map<String, Object> result = queryForObject(sql, paramSource, new ColumnMapRowMapper());
Assert.state(result != null, "No result map");
return result;
}
@Override
public Map<String, Object> queryForMap(String sql, Map<String, ?> paramMap) throws DataAccessException {
Map<String, Object> result = queryForObject(sql, paramMap, new ColumnMapRowMapper());
Assert.state(result != null, "No result map");
return result;
}
@Override
public <T> List<T> queryForList(String sql, SqlParameterSource paramSource, Class<T> elementType) throws DataAccessException {
return query(sql, paramSource, new SingleColumnRowMapper<>(elementType));
}
@Override
public <T> List<T> queryForList(String sql, Map<String, ?> paramMap, Class<T> elementType) throws DataAccessException {
return queryForList(sql, new MapSqlParameterSource(paramMap), elementType);
}
@Override
public List<Map<String, Object>> queryForList(String sql, SqlParameterSource paramSource) throws DataAccessException {
return query(sql, paramSource, new ColumnMapRowMapper());
}
@Override
public List<Map<String, Object>> queryForList(String sql, Map<String, ?> paramMap) throws DataAccessException {
return queryForList(sql, new MapSqlParameterSource(paramMap));
}
@Override
public SqlRowSet queryForRowSet(String sql, SqlParameterSource paramSource) throws DataAccessException {
SqlRowSet result = getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), new SqlRowSetResultSetExtractor());
Assert.state(result != null, "No result");
return result;
}
@Override
public SqlRowSet queryForRowSet(String sql, Map<String, ?> paramMap) throws DataAccessException {
return queryForRowSet(sql, new MapSqlParameterSource(paramMap));
}
@Override
public int update(String sql, SqlParameterSource paramSource) throws DataAccessException {
return getJdbcOperations().update(getPreparedStatementCreator(sql, paramSource));
}
@Override
public int update(String sql, Map<String, ?> paramMap) throws DataAccessException {
return update(sql, new MapSqlParameterSource(paramMap));
}
@Override
public int update(String sql, SqlParameterSource paramSource, KeyHolder generatedKeyHolder) throws DataAccessException {
return update(sql, paramSource, generatedKeyHolder, null);
}
@Override
public int update(String sql, SqlParameterSource paramSource, KeyHolder generatedKeyHolder, @Nullable String[] keyColumnNames) throws DataAccessException {
PreparedStatementCreator psc = getPreparedStatementCreator(sql, paramSource, pscf -> {
if (keyColumnNames != null) {
pscf.setGeneratedKeysColumnNames(keyColumnNames);
} else {
pscf.setReturnGeneratedKeys(true);
}
});
return getJdbcOperations().update(psc, generatedKeyHolder);
}
@Override
public int[] batchUpdate(String sql, Map<String, ?>[] batchValues) {
return batchUpdate(sql, SqlParameterSourceUtils.createBatch(batchValues));
}
@Override
public int[] batchUpdate(String sql, SqlParameterSource[] batchArgs) {
if (batchArgs.length == 0) {
return new int[0];
}
ParsedSql parsedSql = getParsedSql(sql);
PreparedStatementCreatorFactory pscf = getPreparedStatementCreatorFactory(parsedSql, batchArgs[0]);
return getJdbcOperations().batchUpdate(pscf.getSql(), new BatchPreparedStatementSetter() {
@Override
public void setValues(PreparedStatement ps, int i) throws SQLException {
Object[] values = NamedParameterUtils.buildValueArray(parsedSql, batchArgs[i], null);
pscf.newPreparedStatementSetter(values).setValues(ps);
}
@Override
public int getBatchSize() {
return batchArgs.length;
}
});
}
/**
* Build a {@link PreparedStatementCreator} based on the given SQL and named parameters.
* <p>Note: Directly called from all {@code query} variants. Delegates to the common
* {@link #getPreparedStatementCreator(String, SqlParameterSource, Consumer)} method.
* @param sql the SQL statement to execute
* @param paramSource container of arguments to bind
* @return the corresponding {@link PreparedStatementCreator}
* @see #getPreparedStatementCreator(String, SqlParameterSource, Consumer)
*/
protected PreparedStatementCreator getPreparedStatementCreator(String sql, SqlParameterSource paramSource) {
return getPreparedStatementCreator(sql, paramSource, null);
}
/**
* Build a {@link PreparedStatementCreator} based on the given SQL and named parameters.
* <p>Note: Used for the {@code update} variant with generated key handling, and also
* delegated from {@link #getPreparedStatementCreator(String, SqlParameterSource)}.
* @param sql the SQL statement to execute
* @param paramSource container of arguments to bind
* @param customizer callback for setting further properties on the
* {@link PreparedStatementCreatorFactory} in use, applied before the
* actual {@code newPreparedStatementCreator} call
* @return the corresponding {@link PreparedStatementCreator}
* @since 5.0.5
* @see #getParsedSql(String)
* @see PreparedStatementCreatorFactory#PreparedStatementCreatorFactory(String, List)
* @see PreparedStatementCreatorFactory#newPreparedStatementCreator(Object[])
*/
protected PreparedStatementCreator getPreparedStatementCreator(String sql, SqlParameterSource paramSource, @Nullable Consumer<PreparedStatementCreatorFactory> customizer) {
ParsedSql parsedSql = getParsedSql(sql);
PreparedStatementCreatorFactory pscf = getPreparedStatementCreatorFactory(parsedSql, paramSource);
if (customizer != null) {
customizer.accept(pscf);
}
Object[] params = NamedParameterUtils.buildValueArray(parsedSql, paramSource, null);
return pscf.newPreparedStatementCreator(params);
}
/**
* Obtain a parsed representation of the given SQL statement.
* <p>The default implementation uses an LRU cache with an upper limit of 256 entries.
* @param sql the original SQL statement
* @return a representation of the parsed SQL statement
*/
protected ParsedSql getParsedSql(String sql) {
if (getCacheLimit() <= 0) {
return NamedParameterUtils.parseSqlStatement(sql);
}
synchronized (this.parsedSqlCache) {
ParsedSql parsedSql = this.parsedSqlCache.get(sql);
if (parsedSql == null) {
parsedSql = NamedParameterUtils.parseSqlStatement(sql);
this.parsedSqlCache.put(sql, parsedSql);
}
return parsedSql;
}
}
/**
* Build a {@link PreparedStatementCreatorFactory} based on the given SQL and named parameters.
* @param parsedSql parsed representation of the given SQL statement
* @param paramSource container of arguments to bind
* @return the corresponding {@link PreparedStatementCreatorFactory}
* @since 5.1.3
* @see #getPreparedStatementCreator(String, SqlParameterSource, Consumer)
* @see #getParsedSql(String)
*/
protected PreparedStatementCreatorFactory getPreparedStatementCreatorFactory(ParsedSql parsedSql, SqlParameterSource paramSource) {
String sqlToUse = NamedParameterUtils.substituteNamedParameters(parsedSql, paramSource);
List<SqlParameter> declaredParameters = NamedParameterUtils.buildSqlParameterList(parsedSql, paramSource);
return new PreparedStatementCreatorFactory(sqlToUse, declaredParameters);
}
}
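For context, a minimal usage sketch of the named-parameter template above; the DataSource and the employees table are illustrative assumptions:
NamedParameterJdbcOperations named = new NamedParameterJdbcTemplate(dataSource);
Map<String, Object> params = new HashMap<>();
params.put("dept", "ENGINEERING");
// The named :dept parameter is substituted into a '?' placeholder before the wrapped JdbcTemplate executes the query.
Integer headcount = named.queryForObject("select count(*) from employees where department = :dept", params, Integer.class);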
View Source File : NamedParameterBatchUpdateUtils.java
License : MIT License
Project Creator : Vip-Augus
public static int[] executeBatchUpdateWithNamedParameters(final ParsedSql parsedSql, final SqlParameterSource[] batchArgs, JdbcOperations jdbcOperations) {
if (batchArgs.length == 0) {
return new int[0];
}
String sqlToUse = NamedParameterUtils.substituteNamedParameters(parsedSql, batchArgs[0]);
return jdbcOperations.batchUpdate(sqlToUse, new BatchPreparedStatementSetter() {
@Override
public void setValues(PreparedStatement ps, int i) throws SQLException {
Object[] values = NamedParameterUtils.buildValueArray(parsedSql, batchArgs[i], null);
int[] columnTypes = NamedParameterUtils.buildSqlTypeArray(parsedSql, batchArgs[i]);
setStatementParameters(values, ps, columnTypes);
}
@Override
public int getBatchSize() {
return batchArgs.length;
}
});
}
View Source File : PostgresqlExample.java
License : Apache License 2.0
Project Creator : ttddyy
@Override
public JdbcOperations getJdbcOperations() {
JdbcOperations jdbcOperations = SERVER.getJdbcOperations();
if (jdbcOperations == null) {
throw new IllegalStateException("JdbcOperations not yet initialized");
}
return jdbcOperations;
}
View Source File : JdbcSourceIntegrationTests.java
License : Apache License 2.0
Project Creator : spring-cloud-stream-app-starters
/**
* Integration Tests for JdbcSource. Uses hsqldb as a (real) embedded DB.
*
* @author Thomas Risberg
* @author Artem Bilan
*/
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(properties = "spring.datasource.url=jdbc:h2:mem:test", webEnvironment = SpringBootTest.WebEnvironment.NONE)
@DirtiesContext
public abstract class JdbcSourceIntegrationTests {
protected final ObjectMapper objectMapper = new ObjectMapper();
@Autowired
protected Source source;
@Autowired
protected JdbcOperations jdbcOperations;
@Autowired
protected MessageCollector messageCollector;
@TestPropertySource(properties = "jdbc.query=select id, name from test order by id")
public static class DefaultBehaviorTests extends JdbcSourceIntegrationTests {
@Test
public void testExtraction() throws Exception {
Message<?> received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
Map<?, ?> payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(1, payload.get("ID"));
received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(2, payload.get("ID"));
received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(3, payload.get("ID"));
}
}
@TestPropertySource(properties = { "jdbc.query=select id, name, tag from test where tag is NULL order by id", "jdbc.split=false" })
public static class SelectAllNoSplitTests extends JdbcSourceIntegrationTests {
@Test
public void testExtraction() throws Exception {
Message<?> received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
CollectionLikeType valueType = TypeFactory.defaultInstance().constructCollectionLikeType(List.class, Map.class);
List<Map<?, ?>> payload = this.objectMapper.readValue((String) received.getPayload(), valueType);
assertEquals(3, payload.size());
assertEquals(1, payload.get(0).get("ID"));
assertEquals("John", payload.get(2).get("NAME"));
}
}
@TestPropertySource(properties = { "jdbc.query=select id, name from test order by id", "trigger.fixedDelay=600" })
public static class SelectAllWithDelayTests extends JdbcSourceIntegrationTests {
@Test
public void testExtraction() throws Exception {
Message<?> received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
Map<?, ?> payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(1, payload.get("ID"));
received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(2, payload.get("ID"));
received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(3, payload.get("ID"));
// should not wrap around to the beginning since delay is 600
received = messageCollector.forChannel(source.output()).poll(1, TimeUnit.SECONDS);
assertNull(received);
}
}
@TestPropertySource(properties = { "jdbc.query=select id, name from test order by id", "trigger.fixedDelay=1" })
public static class SelectAllWithMinDelayTests extends JdbcSourceIntegrationTests {
@Test
public void testExtraction() throws Exception {
Message<?> received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
Map<?, ?> payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(1, payload.get("ID"));
received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(2, payload.get("ID"));
received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(3, payload.get("ID"));
// should wrap around to the beginning
received = messageCollector.forChannel(source.output()).poll(2, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
payload = this.objectMapper.readValue((String) received.getPayload(), Map.class);
assertEquals(1, payload.get("ID"));
}
}
@TestPropertySource(properties = { "jdbc.query=select id, name, tag from test where tag is NULL order by id", "jdbc.split=false", "jdbc.maxRowsPerPoll=2", "jdbc.update=update test set tag='1' where id in (:id)" })
public static class Select2PerPollNoSplitWithUpdateTests extends JdbcSourceIntegrationTests {
@Test
public void testExtraction() throws Exception {
Message<?> received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
assertThat(received.getPayload(), Matchers.instanceOf(String.class));
CollectionLikeType valueType = TypeFactory.defaultInstance().constructCollectionLikeType(List.class, Map.class);
List<Map<?, ?>> payload = this.objectMapper.readValue((String) received.getPayload(), valueType);
assertEquals(2, payload.size());
assertEquals(1, payload.get(0).get("ID"));
assertEquals(2, payload.get(1).get("ID"));
received = messageCollector.forChannel(source.output()).poll(10, TimeUnit.SECONDS);
assertNotNull(received);
payload = this.objectMapper.readValue((String) received.getPayload(), valueType);
assertEquals(1, payload.size());
assertEquals(3, payload.get(0).get("ID"));
}
}
@SpringBootApplication
public static class JdbcSourceApplication {
}
}
View Source File : PgcopySinkIntegrationTests.java
License : Apache License 2.0
Project Creator : spring-cloud-stream-app-starters
/**
* Integration Tests for PgcopySink. Only runs if PostgreSQL database is available.
*
* @author Thomas Risberg
*/
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, classes = PgcopySinkIntegrationTests.PgcopySinkApplication.class)
@DirtiesContext
public abstract class PgcopySinkIntegrationTests {
@ClassRule
public static PostgresTestSupport postgresAvailable = new PostgresTestSupport();
@Autowired
protected Sink channels;
@Autowired
protected JdbcOperations jdbcOperations;
@TestPropertySource(properties = { "pgcopy.table-name=test", "pgcopy.batch-size=1", "pgcopy.initialize=true" })
public static class BasicPayloadCopyTests extends PgcopySinkIntegrationTests {
@Test
public void testBasicCopy() {
String sent = "hello42";
channels.input().send(MessageBuilder.withPayload(sent).build());
String result = jdbcOperations.queryForObject("select payload from test", String.class);
Assert.assertThat(result, is("hello42"));
}
}
@TestPropertySource(properties = { "pgcopy.tableName=names", "pgcopy.batch-size=4", "pgcopy.initialize=true", "pgcopy.columns=id,name,age" })
public static class PgcopyTextTests extends PgcopySinkIntegrationTests {
@Test
public void testCopyText() {
channels.input().send(MessageBuilder.withPayload("123\tNisse\t25").build());
channels.input().send(MessageBuilder.withPayload("124\tAnna\t21").build());
channels.input().send(MessageBuilder.withPayload("125\tBubba\t22").build());
channels.input().send(MessageBuilder.withPayload("126\tPelle\t32").build());
int result = jdbcOperations.queryForObject("select count(*) from names", Integer.class);
Assert.assertThat(result, is(4));
}
}
@TestPropertySource(properties = { "pgcopy.tableName=names", "pgcopy.batch-size=3", "pgcopy.initialize=true", "pgcopy.columns=id,name,age", "pgcopy.format=CSV" })
public static class PgcopyCSVTests extends PgcopySinkIntegrationTests {
@Test
public void testCopyCSV() {
channels.input().send(MessageBuilder.withPayload("123,\"Nisse\",25").build());
channels.input().send(MessageBuilder.withPayload("124,\"Anna\",21").build());
channels.input().send(MessageBuilder.withPayload("125,\"Bubba\",22").build());
int result = jdbcOperations.queryForObject("select count(*) from names", Integer.class);
Assert.assertThat(result, is(3));
}
}
@TestPropertySource(properties = { "pgcopy.tableName=names", "pgcopy.batch-size=3", "pgcopy.initialize=true", "pgcopy.columns=id,name,age", "pgcopy.format=CSV" })
public static class PgcopyNullTests extends PgcopySinkIntegrationTests {
@Test
public void testCopyCSV() {
channels.input().send(MessageBuilder.withPayload("123,\"Nisse\",25").build());
channels.input().send(MessageBuilder.withPayload("124,,21").build());
channels.input().send(MessageBuilder.withPayload("125,\"Bubba\",22").build());
int result = jdbcOperations.queryForObject("select count(*) from names", Integer.class);
int nulls = jdbcOperations.queryForObject("select count(*) from names where name is null", Integer.class);
Assert.assertThat(result, is(3));
Assert.assertThat(nulls, is(1));
}
}
@TestPropertySource(properties = { "pgcopy.tableName=names", "pgcopy.batch-size=3", "pgcopy.initialize=true", "pgcopy.columns=id,name,age", "pgcopy.format=CSV", "pgcopy.null-string=null" })
public static class PgcopyNullStringTests extends PgcopySinkIntegrationTests {
@Test
public void testCopyCSV() {
channels.input().send(MessageBuilder.withPayload("123,\"Nisse\",25").build());
channels.input().send(MessageBuilder.withPayload("124,null,21").build());
channels.input().send(MessageBuilder.withPayload("125,\"Bubba\",22").build());
int result = jdbcOperations.queryForObject("select count(*) from names", Integer.class);
int nulls = jdbcOperations.queryForObject("select count(*) from names where name is null", Integer.class);
Assert.assertThat(result, is(3));
Assert.assertThat(nulls, is(1));
}
}
@TestPropertySource(properties = { "pgcopy.tableName=names", "pgcopy.batch-size=3", "pgcopy.initialize=true", "pgcopy.columns=id,name,age", "pgcopy.format=CSV", "pgcopy.delimiter=|" })
public static class PgcopyDelimiterTests extends PgcopySinkIntegrationTests {
@Test
public void testCopyCSV() {
channels.input().send(MessageBuilder.withPayload("123|\"Nisse\"|25").build());
channels.input().send(MessageBuilder.withPayload("124|\"Anna\"|21").build());
channels.input().send(MessageBuilder.withPayload("125|\"Bubba\"|22").build());
int result = jdbcOperations.queryForObject("select count(*) from names", Integer.class);
Assert.assertThat(result, is(3));
}
}
@TestPropertySource(properties = { "pgcopy.tableName=names", "pgcopy.batch-size=3", "pgcopy.initialize=true", "pgcopy.columns=id,name,age", "pgcopy.format=CSV", "pgcopy.delimiter=\\t" })
public static class PgcopyEscapedDelimiterTests extends PgcopySinkIntegrationTests {
@Test
public void testCopyCSV() {
channels.input().send(MessageBuilder.withPayload("123\t\"Nisse\"\t25").build());
channels.input().send(MessageBuilder.withPayload("124\t\"Anna\"\t21").build());
channels.input().send(MessageBuilder.withPayload("125\t\"Bubba\"\t22").build());
int result = jdbcOperations.queryForObject("select count(*) from names", Integer.class);
Assert.assertThat(result, is(3));
}
}
@TestPropertySource(properties = { "pgcopy.tableName=names", "pgcopy.batch-size=3", "pgcopy.initialize=true", "pgcopy.columns=id,name,age", "pgcopy.format=CSV", "pgcopy.quote='" })
public static class PgcopyQuoteTests extends PgcopySinkIntegrationTests {
@Test
public void testCopyCSV() {
channels.input().send(MessageBuilder.withPayload("123,Nisse,25").build());
channels.input().send(MessageBuilder.withPayload("124,'Anna',21").build());
channels.input().send(MessageBuilder.withPayload("125,Bubba,22").build());
int result = jdbcOperations.queryForObject("select count(*) from names", Integer.class);
int quoted = jdbcOperations.queryForObject("select count(*) from names where name = 'Anna'", Integer.class);
Assert.assertThat(result, is(3));
Assert.assertThat(quoted, is(1));
}
}
@TestPropertySource(properties = { "pgcopy.tableName=names", "pgcopy.batch-size=3", "pgcopy.initialize=true", "pgcopy.columns=id,name,age", "pgcopy.format=CSV", "pgcopy.escape=\\\\" })
public static class PgcopyEscapeTests extends PgcopySinkIntegrationTests {
@Test
public void testCopyCSV() {
channels.input().send(MessageBuilder.withPayload("123,Nisse,25").build());
channels.input().send(MessageBuilder.withPayload("124,\"Anna\\\"\",21").build());
channels.input().send(MessageBuilder.withPayload("125,Bubba,22").build());
int result = jdbcOperations.queryForObject("select count(*) from names", Integer.class);
int quoted = jdbcOperations.queryForObject("select count(*) from names where name = 'Anna\"'", Integer.class);
Assert.assertThat(result, is(3));
Assert.assertThat(quoted, is(1));
}
}
@SpringBootApplication
public static class PgcopySinkApplication {
public static void main(String[] args) {
SpringApplication.run(PgcopySinkApplication.class, args);
}
}
}
View Source File : JdbcSinkIntegrationTests.java
License : Apache License 2.0
Project Creator : spring-cloud-stream-app-starters
/**
* Integration Tests for JdbcSink. Uses hsqldb as a (real) embedded DB.
*
* @author Eric Bottard
* @author Thomas Risberg
* @author Artem Bilan
* @author Robert St. John
* @author Oliver Flasch
* @author Soby Chacko
* @author Szabolcs Stremler
*/
@RunWith(SpringRunner.class)
@SpringBootTest(properties = "spring.datasource.url=jdbc:h2:mem:test", webEnvironment = SpringBootTest.WebEnvironment.NONE)
@DirtiesContext
public abstract class JdbcSinkIntegrationTests {
@Autowired
protected Sink channels;
@Autowired
protected JdbcOperations jdbcOperations;
public static class DefaultBehavior extends JdbcSinkIntegrationTests {
@Test
public void testInsertion() {
Payload sent = new Payload("hello", 42);
channels.input().send(MessageBuilder.withPayload(sent).build());
String result = jdbcOperations.queryForObject("select payload from messages", String.class);
Assert.assertThat(result, is("hello42"));
}
}
@TestPropertySource(properties = "jdbc.columns=a")
public static class SendAsByteArrayBehavior extends JdbcSinkIntegrationTests {
@Test
public void testInsertionWhenDataReceivedAsByteArray() {
String hello = "{\"a\": \"hello\"}";
channels.input().send(MessageBuilder.withPayload(hello.getBytes()).build());
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ?", Integer.class, "hello"), is(1));
}
}
@TestPropertySource(properties = "jdbc.batchSize=1000")
public static class SimpleBatchInsertTests extends JdbcSinkIntegrationTests {
@Test
public void testBatchInsertion() {
final int numberOfInserts = 5000;
Payload sent = new Payload("hello", 42);
for (int i = 0; i < numberOfInserts; i++) {
channels.input().send(MessageBuilder.withPayload(sent).build());
}
int result = jdbcOperations.queryForObject("select count(*) from messages", Integer.class);
Assert.assertThat(result, is(numberOfInserts));
}
}
@TestPropertySource(properties = { "jdbc.batchSize=1000", "jdbc.idleTimeout=100" })
public static class BatchInsertTimeoutTests extends JdbcSinkIntegrationTests {
@Test
public void testBatchInsertionTimeout() throws InterruptedException {
final int numberOfInserts = 10;
Payload sent = new Payload("hello", 42);
for (int i = 0; i < numberOfInserts; i++) {
channels.input().send(MessageBuilder.withPayload(sent).build());
}
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages", Integer.class), is(0));
// wait 200ms
Thread.sleep(200);
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages", Integer.class), is(numberOfInserts));
}
}
@TestPropertySource(properties = "jdbc.columns=a,b")
public static class SimpleMappingTests extends JdbcSinkIntegrationTests {
@Test
public void testInsertion() {
Payload sent = new Payload("hello", 42);
channels.input().send(MessageBuilder.withPayload(sent).build());
Payload result = jdbcOperations.query("select a, b from messages", new BeanPropertyRowMapper<>(Payload.class)).get(0);
Assert.assertThat(result, samePropertyValuesAs(sent));
}
}
// annotation below relies on java.util.Properties so backslash needs to be doubled
@TestPropertySource(properties = "jdbc.columns=a: a.substring(0\\\\, 4), b: b + 624")
public static class SpELTests extends JdbcSinkIntegrationTests {
@Test
public void testInsertion() {
Payload sent = new Payload("hello", 42);
channels.input().send(MessageBuilder.withPayload(sent).build());
Payload expected = new Payload("hell", 666);
Payload result = jdbcOperations.query("select a, b from messages", new BeanPropertyRowMapper<>(Payload.class)).get(0);
Assert.assertThat(result, samePropertyValuesAs(expected));
}
}
@TestPropertySource(properties = "jdbc.columns=a: headers[foo]")
public static class HeaderInsertTests extends JdbcSinkIntegrationTests {
@Test
public void testHeaderInsertion() {
Payload sent = new Payload("hello", 42);
channels.input().send(MessageBuilder.withPayload(sent).setHeader("foo", "bar").build());
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ?", Integer.class, "bar"), is(1));
}
}
@TestPropertySource(properties = "jdbc.columns=a,b")
public static class VaryingInsertTests extends JdbcSinkIntegrationTests {
@Test
@SuppressWarnings("unchecked")
public void testInsertion() {
Payload a = new Payload("hello", 42);
Payload b = new Payload("world", 12);
Payload c = new Payload("bonjour", null);
Payload d = new Payload(null, 22);
channels.input().send(MessageBuilder.withPayload(a).build());
channels.input().send(MessageBuilder.withPayload(b).build());
channels.input().send(MessageBuilder.withPayload(c).build());
channels.input().send(MessageBuilder.withPayload(d).build());
List<Payload> result = jdbcOperations.query("select a, b from messages", new BeanPropertyRowMapper<>(Payload.class));
Assert.assertThat(result, containsInAnyOrder(samePropertyValuesAs(a), samePropertyValuesAs(b), samePropertyValuesAs(c), samePropertyValuesAs(d)));
}
}
@TestPropertySource(properties = { "jdbc.tableName=no_script", "jdbc.initialize=true", "jdbc.columns=a,b" })
public static class ImplicitTableCreationTests extends JdbcSinkIntegrationTests {
@Test
public void testInsertion() {
Payload sent = new Payload("hello", 42);
channels.input().send(MessageBuilder.withPayload(sent).build());
Payload result = jdbcOperations.query("select a, b from no_script", new BeanPropertyRowMapper<>(Payload.class)).get(0);
Assert.assertThat(result, samePropertyValuesAs(sent));
}
}
@TestPropertySource(properties = { "jdbc.tableName=foobar", "jdbc.initialize=classpath:explicit-script.sql", "jdbc" + ".columns=a,b" })
public static class ExplicitTableCreationTests extends JdbcSinkIntegrationTests {
@Test
public void testInsertion() {
Payload sent = new Payload("hello", 42);
channels.input().send(MessageBuilder.withPayload(sent).build());
Payload result = jdbcOperations.query("select a, b from foobar", new BeanPropertyRowMapper<>(Payload.class)).get(0);
Assert.assertThat(result, samePropertyValuesAs(sent));
}
}
@TestPropertySource(properties = "jdbc.columns=a,b")
public static class MapPayloadInsertTests extends JdbcSinkIntegrationTests {
@Test
public void testInsertion() {
NamedParameterJdbcOperations namedParameterJdbcOperations = new NamedParameterJdbcTemplate(jdbcOperations);
Map<String, Object> mapA = new HashMap<>();
mapA.put("a", "hello1");
mapA.put("b", 42);
Map<String, Object> mapB = new HashMap<>();
mapB.put("a", "hello2");
mapB.put("b", null);
Map<String, Object> mapC = new HashMap<>();
mapC.put("a", "hello3");
channels.input().send(MessageBuilder.withPayload(mapA).build());
channels.input().send(MessageBuilder.withPayload(mapB).build());
channels.input().send(MessageBuilder.withPayload(mapC).build());
Assert.assertThat(namedParameterJdbcOperations.queryForObject("select count(*) from messages where a = :a and b = :b", mapA, Integer.class), is(1));
Assert.assertThat(namedParameterJdbcOperations.queryForObject("select count(*) from messages where a = :a and b IS NULL", mapB, Integer.class), is(1));
Assert.assertThat(namedParameterJdbcOperations.queryForObject("select count(*) from messages where a = :a and b IS NULL", mapC, Integer.class), is(1));
}
}
@TestPropertySource(properties = "jdbc.columns=a,b")
public static class TuplePayloadInsertTests extends JdbcSinkIntegrationTests {
@Test
public void testInsertion() {
Tuple tupleA = TupleBuilder.tuple().of("a", "hello1", "b", 42);
Tuple tupleB = TupleBuilder.tuple().of("a", "hello2", "b", null);
Tuple tupleC = TupleBuilder.tuple().of("a", "hello3");
channels.input().send(MessageBuilder.withPayload(tupleA).build());
channels.input().send(MessageBuilder.withPayload(tupleB).build());
channels.input().send(MessageBuilder.withPayload(tupleC).build());
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ? and b = ?", Integer.class, tupleA.getString("a"), tupleA.getInt("b")), is(1));
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ? and b IS NULL", Integer.class, tupleB.getString("a")), is(1));
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ? and b IS NULL", Integer.class, tupleC.getString("a")), is(1));
}
}
@TestPropertySource(properties = "jdbc.columns=a,b")
public static class JsonStringPayloadInsertTests extends JdbcSinkIntegrationTests {
@Test
public void testInsertion() {
String stringA = "{\"a\": \"hello1\", \"b\": 42}";
String stringB = "{\"a\": \"hello2\", \"b\": null}";
String stringC = "{\"a\": \"hello3\"}";
channels.input().send(MessageBuilder.withPayload(stringA).build());
channels.input().send(MessageBuilder.withPayload(stringB).build());
channels.input().send(MessageBuilder.withPayload(stringC).build());
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ? and b = ?", Integer.class, "hello1", 42), is(1));
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ? and b IS NULL", Integer.class, "hello2"), is(1));
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ? and b IS NULL", Integer.class, "hello3"), is(1));
}
}
@TestPropertySource(properties = "jdbc.columns=a: new StringBuilder(payload.a).reverse().toString(), b")
public static class UnqualifiableColumnExpressionTests extends JdbcSinkIntegrationTests {
@Test
public void doesNotFailParsingUnqualifiableExpression() {
// if the app initializes, the test condition passes, but go ahead and apply the column expression anyway
channels.input().send(MessageBuilder.withPayload(new Payload("desrever", 123)).build());
Assert.assertThat(jdbcOperations.queryForObject("select count(*) from messages where a = ? and b = ?", Integer.class, "reversed", 123), is(1));
}
}
public static class Payload {
private String a;
private Integer b;
public Payload() {
}
public Payload(String a, Integer b) {
this.a = a;
this.b = b;
}
public String getA() {
return a;
}
public Integer getB() {
return b;
}
public void setA(String a) {
this.a = a;
}
public void setB(Integer b) {
this.b = b;
}
@Override
public String toString() {
return a + b;
}
}
@SpringBootApplication
public static class JdbcSinkApplication {
}
}
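The sink in the tests above is driven entirely by jdbc.* properties from the @TestPropertySource annotations. As a rough sketch (property names are taken from those annotations, values are purely illustrative), an equivalent application.properties might look like this:
# table to write to, created from an explicit script rather than implicitly
jdbc.tableName=foobar
jdbc.initialize=classpath:explicit-script.sql
# map payload fields to columns; a column can also carry an expression evaluated against the payload,
# as the UnqualifiableColumnExpressionTests property shows
jdbc.columns=a: new StringBuilder(payload.a).reverse().toString(), b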
19
View Source File : MsSqlServerExtension.java
License : Apache License 2.0
Project Creator : r2dbc
/**
* Test container extension for Microsoft SQL Server.
*/
@SuppressWarnings({ "unchecked", "rawtypes", "deprecation" })
public final class MsSqlServerExtension implements BeforeAllCallback, AfterAllCallback {
private volatile MSSQLServerContainer<?> containerInstance = null;
private final Supplier<MSSQLServerContainer<?>> container = () -> {
if (this.containerInstance != null) {
return this.containerInstance;
}
return this.containerInstance = new MSSQLServerContainer() {
protected void configure() {
this.addExposedPort(MS_SQL_SERVER_PORT);
this.addEnv("ACCEPT_EULA", "Y");
this.addEnv("SA_PreplacedWORD", getPreplacedword());
this.withReuse(true);
}
};
};
private HikariDataSource dataSource;
private JdbcOperations jdbcOperations;
private final DatabaseContainer sqlServer = External.INSTANCE.isAvailable() ? External.INSTANCE : new TestContainer(this.container.get());
private final boolean useTestContainer = this.sqlServer instanceof TestContainer;
@Override
public void beforeAll(ExtensionContext context) {
initialize();
}
public void initialize() {
if (this.useTestContainer) {
this.container.get().start();
}
HikariDataSource hikariDataSource = new HikariDataSource();
hikariDataSource.setJdbcUrl("jdbc:sqlserver://" + getHost() + ":" + getPort() + ";database=master;sendStringParametersAsUnicode=true");
hikariDataSource.setUsername(getUsername());
hikariDataSource.setPassword(getPassword());
this.dataSource = hikariDataSource;
this.dataSource.setMaximumPoolSize(1);
this.jdbcOperations = new JdbcTemplate(this.dataSource);
}
@Override
public void afterAll(ExtensionContext context) {
}
public MssqlConnectionConfiguration.Builder configBuilder() {
return MssqlConnectionConfiguration.builder().host(getHost()).username(getUsername()).password(getPassword());
}
public MssqlConnectionConfiguration getConnectionConfiguration() {
return configBuilder().build();
}
public HikariDataSource getDataSource() {
return this.dataSource;
}
@Nullable
public JdbcOperations getJdbcOperations() {
return this.jdbcOperations;
}
public String getHost() {
return this.sqlServer.getHost();
}
public int getPort() {
return this.sqlServer.getPort();
}
public String getUsername() {
return this.sqlServer.getUsername();
}
public String getPassword() {
return this.sqlServer.getPassword();
}
/**
* Interface to be implemented by database providers (provided database, test container).
*/
interface DatabaseContainer {
String getHost();
int getPort();
String getUsername();
String getPassword();
}
/**
* Externally provided SQL Server instance.
*/
static class External implements DatabaseContainer {
public static final External INSTANCE = new External();
@Override
public String getHost() {
return "localhost";
}
@Override
public int getPort() {
return 1433;
}
@Override
public String getUsername() {
return "sa";
}
@Override
public String getPassword() {
return "A_Str0ng_Required_Password";
}
/**
* Returns whether this container is available.
*
* @return {@code true} if a TCP connection to the configured host and port can be established
*/
@SuppressWarnings("try")
boolean isAvailable() {
try (Socket ignored = new Socket(getHost(), getPort())) {
return true;
} catch (IOException e) {
return false;
}
}
}
/**
* {@link DatabaseContainer} provided by {@link JdbcDatabaseContainer}.
*/
static class TestContainer implements DatabaseContainer {
private final JdbcDatabaseContainer<?> container;
TestContainer(JdbcDatabaseContainer<?> container) {
this.container = container;
}
@Override
public String getHost() {
return this.container.getContainerIpAddress();
}
@Override
public int getPort() {
return this.container.getMappedPort(MSSQLServerContainer.MS_SQL_SERVER_PORT);
}
@Override
public String getUsername() {
return this.container.getUsername();
}
@Override
public String getPassword() {
return this.container.getPassword();
}
}
}
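For context, a JUnit 5 extension like this is usually attached with @RegisterExtension. The following is a minimal usage sketch under that assumption; the test class name and the query are made up and are not part of the project:
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.springframework.jdbc.core.JdbcOperations;

final class MsSqlServerExtensionUsageSketch {

    // beforeAll() starts the container (or reuses the external instance) and builds the JdbcTemplate
    @RegisterExtension
    static final MsSqlServerExtension SERVER = new MsSqlServerExtension();

    @Test
    void jdbcOperationsIsAvailableAfterStartup() {
        JdbcOperations jdbc = SERVER.getJdbcOperations();
        Assertions.assertNotNull(jdbc);
        Assertions.assertEquals(Integer.valueOf(1), jdbc.queryForObject("SELECT 1", Integer.class));
    }
}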
19
View Source File : H2ServerExtension.java
License : Apache License 2.0
Project Creator : r2dbc
public final class H2ServerExtension implements BeforeAllCallback, AfterAllCallback {
private final String password = UUID.randomUUID().toString();
private final String url = String.format("mem:%s", UUID.randomUUID().toString());
private final String username = UUID.randomUUID().toString();
private HikariDataSource dataSource;
private JdbcOperations jdbcOperations;
@Override
public void afterAll(ExtensionContext context) {
this.dataSource.close();
}
@Override
public void beforeAll(ExtensionContext context) throws Exception {
this.dataSource = DataSourceBuilder.create().type(HikariDataSource.class).url(String.format("jdbc:h2:%s;USER=%s;PASSWORD=%s;DB_CLOSE_DELAY=-1;TRACE_LEVEL_FILE=4", this.url, this.username, this.password)).build();
this.dataSource.setMaximumPoolSize(1);
this.jdbcOperations = new JdbcTemplate(this.dataSource);
}
@Nullable
public JdbcOperations getJdbcOperations() {
return this.jdbcOperations;
}
public String getPassword() {
return this.password;
}
public String getUrl() {
return this.url;
}
public String getUsername() {
return this.username;
}
}
19
View Source File : H2RowTest.java
License : Apache License 2.0
Project Creator : r2dbc
private JdbcOperations getJdbcOperations() {
JdbcOperations jdbcOperations = SERVER.getJdbcOperations();
if (jdbcOperations == null) {
throw new IllegalStateException("JdbcOperations not yet initialized");
}
return jdbcOperations;
}
19
View Source File : PostgresqlServerExtension.java
License : Apache License 2.0
Project Creator : pgjdbc
/**
* JUnit Extension to establish a Postgres database context during integration tests.
* Uses either {@link TestContainer Testcontainers} or a {@link External locally available database}.
*/
public final class PostgresqlServerExtension implements BeforeAllCallback, AfterAllCallback {
static PostgreSQLContainer<?> containerInstance = null;
static Network containerNetwork = null;
private final Supplier<PostgreSQLContainer<?>> container = () -> {
if (PostgresqlServerExtension.containerInstance != null) {
return PostgresqlServerExtension.containerInstance;
}
PostgresqlServerExtension.containerNetwork = Network.newNetwork();
return PostgresqlServerExtension.containerInstance = container();
};
private final DatabaseContainer postgres = getContainer();
private final boolean useTestContainer = this.postgres instanceof TestContainer;
private HikariDataSource dataSource;
private JdbcOperations jdbcOperations;
public PostgresqlServerExtension() {
}
private DatabaseContainer getContainer() {
File testrc = new File(".testrc");
String preference = "testcontainer";
if (testrc.exists()) {
Properties properties = new Properties();
try (FileReader reader = new FileReader(testrc)) {
properties.load(reader);
} catch (IOException e) {
throw new IllegalStateException(e);
}
preference = properties.getProperty("preference", preference);
}
if (preference.equals(External.PREFERENCE)) {
return new External();
}
return new TestContainer(this.container.get());
}
@Override
@SuppressWarnings("rawtypes")
public void afterAll(ExtensionContext context) {
if (this.dataSource != null) {
this.dataSource.close();
}
}
@Override
@SuppressWarnings("rawtypes")
public void beforeAll(ExtensionContext context) {
initialize();
}
public void initialize() {
if (this.useTestContainer) {
this.container.get().start();
}
initializeConnectors();
}
private void initializeConnectors() {
HikariDataSource dataSource = new HikariDataSource();
dataSource.setUsername(getUsername());
dataSource.setPassword(getPassword());
dataSource.setJdbcUrl(String.format("jdbc:postgresql://%s:%d/%s", getHost(), getPort(), getDatabase()));
this.dataSource = dataSource;
this.jdbcOperations = new JdbcTemplate(this.dataSource);
}
public String getClientCrt() {
return getResourcePath("client.crt").toAbsolutePath().toString();
}
public String getClientKey() {
return getResourcePath("client.key").toAbsolutePath().toString();
}
public PostgresqlConnectionConfiguration.Builder configBuilder() {
return PostgresqlConnectionConfiguration.builder().database(getDatabase()).host(getHost()).port(getPort()).username(getUsername()).password(getPassword());
}
public PostgresqlConnectionConfiguration getConnectionConfiguration() {
return configBuilder().build();
}
public String getDatabase() {
return this.postgres.getDatabase();
}
public DataSource getDataSource() {
return this.dataSource;
}
@Nullable
public JdbcOperations getJdbcOperations() {
return this.jdbcOperations;
}
public String getHost() {
return this.postgres.getHost();
}
public int getPort() {
return this.postgres.getPort();
}
public String getServerCrt() {
return getResourcePath("server.crt").toAbsolutePath().toString();
}
public String getServerKey() {
return getResourcePath("server.key").toAbsolutePath().toString();
}
public String getUsername() {
return this.postgres.getUsername();
}
public String getPassword() {
return this.postgres.getPassword();
}
public DatabaseContainer getPostgres() {
return postgres;
}
private <T extends PostgreSQLContainer<T>> T container() {
T container = new PostgreSQLContainer<T>("postgres:latest").withCopyFileToContainer(getHostPath("server.crt", 0600), "/var/server.crt").withCopyFileToContainer(getHostPath("server.key", 0600), "/var/server.key").withCopyFileToContainer(getHostPath("client.crt", 0600), "/var/client.crt").withCopyFileToContainer(getHostPath("pg_hba.conf", 0600), "/var/pg_hba.conf").withCopyFileToContainer(getHostPath("setup.sh", 0755), "/var/setup.sh").withCopyFileToContainer(getHostPath("test-db-init-script.sql", 0755), "/docker-entrypoint-initdb.d/test-db-init-script.sql").withReuse(true).withNetworkAliases("r2dbc-postgres").withCommand("/var/setup.sh").withNetwork(PostgresqlServerExtension.containerNetwork);
return container;
}
private Path getResourcePath(String name) {
URL resource = getClass().getClassLoader().getResource(name);
if (resource == null) {
throw new IllegalStateException("Resource not found: " + name);
}
try {
return Paths.get(resource.toURI());
} catch (URISyntaxException e) {
throw new IllegalStateException("Cannot convert to path for: " + name, e);
}
}
private MountableFile getHostPath(String name, int mode) {
return forHostPath(getResourcePath(name), mode);
}
/**
* Interface to be implemented by database providers (provided database, test container).
*/
interface DatabaseContainer {
String getHost();
@Nullable
Network getNetwork();
int getPort();
String getDatabase();
String getUsername();
String getPassword();
String getNetworkAlias();
}
/**
* Externally provided Postgres instance.
*/
static class External implements DatabaseContainer {
public static final String PREFERENCE = "external";
public static final External INSTANCE = new External();
@Override
public String getHost() {
return "localhost";
}
@Override
@Nullable
public Network getNetwork() {
return null;
}
@Override
public int getPort() {
return 5432;
}
@Override
public String getDatabase() {
return "postgres";
}
@Override
public String getUsername() {
return "postgres";
}
@Override
public String getPassword() {
return "postgres";
}
@Override
public String getNetworkAlias() {
return this.getHost();
}
}
/**
* {@link DatabaseContainer} provided by {@link JdbcDatabaseContainer}.
*/
static class TestContainer implements DatabaseContainer {
public static final String PREFERENCE = "testcontainer";
private final JdbcDatabaseContainer<?> container;
TestContainer(JdbcDatabaseContainer<?> container) {
this.container = container;
}
@Override
public String getHost() {
return this.container.getContainerIpAddress();
}
@Override
public Network getNetwork() {
return this.container.getNetwork();
}
@Override
public int getPort() {
return this.container.getMappedPort(PostgreSQLContainer.POSTGRESQL_PORT);
}
@Override
public String getDatabase() {
return this.container.getDatabaseName();
}
@Override
public String getUsername() {
return this.container.getUsername();
}
@Override
public String getPassword() {
return this.container.getPassword();
}
@Override
public String getNetworkAlias() {
return "r2dbc-postgres";
}
}
}
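Note how getContainer() above consults an optional .testrc properties file in the working directory: the only key it reads is preference, which defaults to "testcontainer". Under that assumption, a minimal .testrc that switches the tests to the externally provided Postgres (localhost:5432, database/user/password all "postgres", as defined in the External class) is a single line:
preference=external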
19
View Source File : PgPoolExtendedQueryPostgresqlStatementIntegrationTests.java
License : Apache License 2.0
Project Creator : pgjdbc
/**
* Integration tests for {@link ExtendedQueryPostgresqlStatement} using PgPool.
*/
final class PgPoolExtendedQueryPostgresqlStatementIntegrationTests extends ExtendedQueryPostgresqlStatementIntegrationTests {
PgPool pgPool;
HikariDataSource dataSource;
JdbcOperations jdbcOperations;
@BeforeEach
void setUp() {
this.pgPool = new PgPool(SERVER);
this.dataSource = new HikariDataSource();
this.dataSource.setUsername(SERVER.getUsername());
this.dataSource.setPassword(SERVER.getPassword());
this.dataSource.setJdbcUrl(String.format("jdbc:postgresql://%s:%d/%s", this.pgPool.getHost(), this.pgPool.getPort(), SERVER.getDatabase()));
this.jdbcOperations = new JdbcTemplate(this.dataSource);
super.setUp();
}
@AfterEach
void tearDown() {
super.tearDown();
this.dataSource.close();
this.pgPool.close();
}
@Override
protected void customize(PostgresqlConnectionConfiguration.Builder builder) {
builder.compatibilityMode(true);
builder.host(this.pgPool.getHost()).port(this.pgPool.getPort());
}
}
19
View Source File : JdbcEventDao.java
License : MIT License
Project Creator : PacktPublishing
/**
* A jdbc implementation of {@link EventDao}.
*
* @author Rob Winch
*/
@Repository
public class JdbcEventDao implements EventDao {
// --- members ---
private final JdbcOperations jdbcOperations;
// --- constructors ---
@Autowired
public JdbcEventDao(JdbcOperations jdbcOperations) {
if (jdbcOperations == null) {
throw new IllegalArgumentException("jdbcOperations cannot be null");
}
this.jdbcOperations = jdbcOperations;
}
// --- EventService ---
@Override
@Transactional(readOnly = true)
public Event getEvent(int eventId) {
return jdbcOperations.queryForObject(EVENT_QUERY + " and e.id = ?", EVENT_ROW_MAPPER, eventId);
}
@Override
public int createEvent(final Event event) {
if (event == null) {
throw new IllegalArgumentException("event cannot be null");
}
if (event.getId() != null) {
throw new IllegalArgumentException("event.getId() must be null when creating a new Message");
}
final CalendarUser owner = event.getOwner();
if (owner == null) {
throw new IllegalArgumentException("event.getOwner() cannot be null");
}
final CalendarUser attendee = event.getAttendee();
if (attendee == null) {
throw new IllegalArgumentException("attendee.getOwner() cannot be null");
}
final Calendar when = event.getWhen();
if (when == null) {
throw new IllegalArgumentException("event.getWhen() cannot be null");
}
KeyHolder keyHolder = new GeneratedKeyHolder();
this.jdbcOperations.update(new PreparedStatementCreator() {
public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
PreparedStatement ps = connection.prepareStatement("insert into events (when,summary,description,owner,attendee) values (?, ?, ?, ?, ?)", new String[] { "id" });
ps.setDate(1, new java.sql.Date(when.getTimeInMillis()));
ps.setString(2, event.getSummary());
ps.setString(3, event.getDescription());
ps.setInt(4, owner.getId());
ps.setObject(5, attendee == null ? null : attendee.getId());
return ps;
}
}, keyHolder);
return keyHolder.getKey().intValue();
}
@Override
@Transactional(readOnly = true)
public List<Event> findForUser(int userId) {
return jdbcOperations.query(EVENT_QUERY + " and (e.owner = ? or e.attendee = ?) order by e.id", EVENT_ROW_MAPPER, userId, userId);
}
@Override
@Transactional(readOnly = true)
public List<Event> getEvents() {
return jdbcOperations.query(EVENT_QUERY + " order by e.id", EVENT_ROW_MAPPER);
}
// --- non-public static members ---
/**
* A RowMapper for mapping a {@link Event}
*/
private static final RowMapper<Event> EVENT_ROW_MAPPER = new RowMapper<Event>() {
public Event mapRow(ResultSet rs, int rowNum) throws SQLException {
CalendarUser attendee = ATTENDEE_ROW_MAPPER.mapRow(rs, rowNum);
CalendarUser owner = OWNER_ROW_MAPPER.mapRow(rs, rowNum);
Event event = new Event();
event.setId(rs.getInt("events.id"));
event.setSummary(rs.getString("events.summary"));
event.setDescription(rs.getString("events.description"));
Calendar when = Calendar.getInstance();
when.setTime(rs.getDate("events.when"));
event.setWhen(when);
event.setAttendee(attendee);
event.setOwner(owner);
return event;
}
};
private static final RowMapper<CalendarUser> ATTENDEE_ROW_MAPPER = new JdbcCalendarUserDao.CalendarUserRowMapper("attendee_");
private static final RowMapper<CalendarUser> OWNER_ROW_MAPPER = new JdbcCalendarUserDao.CalendarUserRowMapper("owner_");
private static final String EVENT_QUERY = "select e.id, e.summary, e.description, e.when, " + "owner.id as owner_id, owner.email as owner_email, owner.password as owner_password, owner.first_name as owner_first_name, owner.last_name as owner_last_name, " + "attendee.id as attendee_id, attendee.email as attendee_email, attendee.password as attendee_password, attendee.first_name as attendee_first_name, attendee.last_name as attendee_last_name " + "from events as e, calendar_users as owner, calendar_users as attendee " + "where e.owner = owner.id and e.attendee = attendee.id";
}
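The createEvent method above uses the classic generated-key idiom: a PreparedStatementCreator passed to JdbcOperations.update together with a GeneratedKeyHolder. Because PreparedStatementCreator has a single abstract method, the same idiom can be written with a lambda; the sketch below is illustrative only (table and column names are made up) and is not the project's code:
import java.sql.PreparedStatement;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.support.GeneratedKeyHolder;
import org.springframework.jdbc.support.KeyHolder;

class GeneratedKeyInsertSketch {

    private final JdbcOperations jdbcOperations;

    GeneratedKeyInsertSketch(JdbcOperations jdbcOperations) {
        this.jdbcOperations = jdbcOperations;
    }

    int insertAndReturnId(String summary) {
        KeyHolder keyHolder = new GeneratedKeyHolder();
        // the lambda plays the role of the anonymous PreparedStatementCreator above
        jdbcOperations.update(connection -> {
            PreparedStatement ps = connection.prepareStatement(
                    "insert into events (summary) values (?)", new String[] { "id" });
            ps.setString(1, summary);
            return ps;
        }, keyHolder);
        return keyHolder.getKey().intValue();
    }
}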
19
View Source File : JdbcCalendarUserDao.java
License : MIT License
Project Creator : PacktPublishing
/**
* A jdbc implementation of {@link CalendarUserDao}.
*
* @author Rob Winch
*/
@Repository
public class JdbcCalendarUserDao implements CalendarUserDao {
// --- members ---
private final JdbcOperations jdbcOperations;
// --- constructors ---
@Autowired
public JdbcCalendarUserDao(JdbcOperations jdbcOperations) {
if (jdbcOperations == null) {
throw new IllegalArgumentException("jdbcOperations cannot be null");
}
this.jdbcOperations = jdbcOperations;
}
// --- CalendarUserDao methods ---
@Override
@Transactional(readOnly = true)
public CalendarUser getUser(int id) {
return jdbcOperations.queryForObject(CALENDAR_USER_QUERY + "id = ?", CALENDAR_USER_MAPPER, id);
}
@Override
@Transactional(readOnly = true)
public CalendarUser findUserByEmail(String email) {
if (email == null) {
throw new IllegalArgumentException("email cannot be null");
}
try {
return jdbcOperations.queryForObject(CALENDAR_USER_QUERY + "email = ?", CALENDAR_USER_MAPPER, email);
} catch (EmptyResultDataAccessException notFound) {
return null;
}
}
@Override
@Transactional(readOnly = true)
public List<CalendarUser> findUsersByEmail(String email) {
if (email == null) {
throw new IllegalArgumentException("email cannot be null");
}
if ("".equals(email)) {
throw new IllegalArgumentException("email cannot be empty string");
}
return jdbcOperations.query(CALENDAR_USER_QUERY + "email like ? order by id", CALENDAR_USER_MAPPER, email + "%");
}
@Override
public int createUser(final CalendarUser userToAdd) {
if (userToAdd == null) {
throw new IllegalArgumentException("userToAdd cannot be null");
}
if (userToAdd.getId() != null) {
throw new IllegalArgumentException("userToAdd.getId() must be null when creating a " + CalendarUser.clreplaced.getName());
}
KeyHolder keyHolder = new GeneratedKeyHolder();
this.jdbcOperations.update(new PreparedStatementCreator() {
public PreparedStatement createPreparedStatement(Connection connection) throws SQLException {
PreparedStatement ps = connection.prepareStatement("insert into calendar_users (email, preplacedword, first_name, last_name) values (?, ?, ?, ?)", new String[] { "id" });
ps.setString(1, userToAdd.getEmail());
ps.setString(2, userToAdd.getPreplacedword());
ps.setString(3, userToAdd.getFirstName());
ps.setString(4, userToAdd.getLastName());
return ps;
}
}, keyHolder);
return keyHolder.getKey().intValue();
}
// --- non-public static members ---
private static final String CALENDAR_USER_QUERY = "select id, email, password, first_name, last_name from calendar_users where ";
private static final RowMapper<CalendarUser> CALENDAR_USER_MAPPER = new CalendarUserRowMapper("calendar_users.");
/**
* Create a new RowMapper that resolves {@link CalendarUser}'s given a column label prefix. By allowing the prefix
* to be specified we can reuse the same {@link RowMapper} for joins in other tables.
*
* @author Rob Winch
*/
static class CalendarUserRowMapper implements RowMapper<CalendarUser> {
private final String columnLabelPrefix;
/**
* Creates a new instance that allows for a custom prefix for the columnLabel.
*
* @param columnLabelPrefix
*/
public CalendarUserRowMapper(String columnLabelPrefix) {
this.columnLabelPrefix = columnLabelPrefix;
}
public CalendarUser mapRow(ResultSet rs, int rowNum) throws SQLException {
CalendarUser user = new CalendarUser();
user.setId(rs.getInt(columnLabelPrefix + "id"));
user.setEmail(rs.getString(columnLabelPrefix + "email"));
user.setPassword(rs.getString(columnLabelPrefix + "password"));
user.setFirstName(rs.getString(columnLabelPrefix + "first_name"));
user.setLastName(rs.getString(columnLabelPrefix + "last_name"));
return user;
}
}
}
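The prefix taken by CalendarUserRowMapper is what lets JdbcEventDao above reuse the same mapper for the owner_ and attendee_ aliases of its join. A minimal fragment illustrating the idea, assuming a JdbcOperations named jdbcOperations is in scope and the caller sits in the same package (the mapper is package-private); the SQL and aliases are illustrative, modeled on EVENT_QUERY above:
RowMapper<CalendarUser> ownerMapper = new JdbcCalendarUserDao.CalendarUserRowMapper("owner_");
List<CalendarUser> owners = jdbcOperations.query(
        "select owner.id as owner_id, owner.email as owner_email, owner.password as owner_password, "
        + "owner.first_name as owner_first_name, owner.last_name as owner_last_name "
        + "from events e join calendar_users owner on e.owner = owner.id",
        ownerMapper);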
19
View Source File : NamedParameterJdbcTemplate.java
License : Apache License 2.0
Project Creator : langtianya
/**
* Template class with a basic set of JDBC operations, allowing the use
* of named parameters rather than traditional '?' placeholders.
*
* <p>This class delegates to a wrapped {@link #getJdbcOperations() JdbcTemplate}
* once the substitution from named parameters to JDBC style '?' placeholders is
* done at execution time. It also allows for expanding a {@link java.util.List}
* of values to the appropriate number of placeholders.
*
* <p>The underlying {@link org.springframework.jdbc.core.JdbcTemplate} is
* exposed to allow for convenient access to the traditional
* {@link org.springframework.jdbc.core.JdbcTemplate} methods.
*
* <p><b>NOTE: An instance of this class is thread-safe once configured.</b>
*
* @author Thomas Risberg
* @author Juergen Hoeller
* @since 2.0
* @see NamedParameterJdbcOperations
* @see org.springframework.jdbc.core.JdbcTemplate
*/
public class NamedParameterJdbcTemplate implements NamedParameterJdbcOperations {
/**
* Default maximum number of entries for this template's SQL cache: 256
*/
public static final int DEFAULT_CACHE_LIMIT = 256;
/**
* The JdbcTemplate we are wrapping
*/
private final JdbcOperations classicJdbcTemplate;
private volatile int cacheLimit = DEFAULT_CACHE_LIMIT;
/**
* Cache of original SQL String to ParsedSql representation
*/
@SuppressWarnings("serial")
private final Map<String, ParsedSql> parsedSqlCache = new LinkedHashMap<String, ParsedSql>(DEFAULT_CACHE_LIMIT, 0.75f, true) {
@Override
protected boolean removeEldestEntry(Map.Entry<String, ParsedSql> eldest) {
return size() > getCacheLimit();
}
};
/**
* Create a new NamedParameterJdbcTemplate for the given {@link DataSource}.
* <p>Creates a classic Spring {@link org.springframework.jdbc.core.JdbcTemplate} and wraps it.
* @param dataSource the JDBC DataSource to access
*/
public NamedParameterJdbcTemplate(DataSource dataSource) {
Assert.notNull(dataSource, "DataSource must not be null");
this.classicJdbcTemplate = new JdbcTemplate(dataSource);
}
/**
* Create a new NamedParameterJdbcTemplate for the given classic
* Spring {@link org.springframework.jdbc.core.JdbcTemplate}.
* @param classicJdbcTemplate the classic Spring JdbcTemplate to wrap
*/
public NamedParameterJdbcTemplate(JdbcOperations classicJdbcTemplate) {
Assert.notNull(classicJdbcTemplate, "JdbcTemplate must not be null");
this.classicJdbcTemplate = classicJdbcTemplate;
}
/**
* Expose the classic Spring JdbcTemplate to allow invocation of
* less commonly used methods.
*/
@Override
public JdbcOperations getJdbcOperations() {
return this.classicJdbcTemplate;
}
/**
* Specify the maximum number of entries for this template's SQL cache.
* Default is 256.
*/
public void setCacheLimit(int cacheLimit) {
this.cacheLimit = cacheLimit;
}
/**
* Return the maximum number of entries for this template's SQL cache.
*/
public int getCacheLimit() {
return this.cacheLimit;
}
@Override
public <T> T execute(String sql, SqlParameterSource paramSource, PreparedStatementCallback<T> action) throws DataAccessException {
return getJdbcOperations().execute(getPreparedStatementCreator(sql, paramSource), action);
}
@Override
public <T> T execute(String sql, Map<String, ?> paramMap, PreparedStatementCallback<T> action) throws DataAccessException {
return execute(sql, new MapSqlParameterSource(paramMap), action);
}
@Override
public <T> T execute(String sql, PreparedStatementCallback<T> action) throws DataAccessException {
return execute(sql, EmptySqlParameterSource.INSTANCE, action);
}
@Override
public <T> T query(String sql, SqlParameterSource paramSource, ResultSetExtractor<T> rse) throws DataAccessException {
return getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), rse);
}
@Override
public <T> T query(String sql, Map<String, ?> paramMap, ResultSetExtractor<T> rse) throws DataAccessException {
return query(sql, new MapSqlParameterSource(paramMap), rse);
}
@Override
public <T> T query(String sql, ResultSetExtractor<T> rse) throws DataAccessException {
return query(sql, EmptySqlParameterSource.INSTANCE, rse);
}
@Override
public void query(String sql, SqlParameterSource paramSource, RowCallbackHandler rch) throws DataAccessException {
getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), rch);
}
@Override
public void query(String sql, Map<String, ?> paramMap, RowCallbackHandler rch) throws DataAccessException {
query(sql, new MapSqlParameterSource(paramMap), rch);
}
@Override
public void query(String sql, RowCallbackHandler rch) throws DataAccessException {
query(sql, EmptySqlParameterSource.INSTANCE, rch);
}
@Override
public <T> List<T> query(String sql, SqlParameterSource paramSource, RowMapper<T> rowMapper) throws DataAccessException {
return getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), rowMapper);
}
@Override
public <T> List<T> query(String sql, Map<String, ?> paramMap, RowMapper<T> rowMapper) throws DataAccessException {
return query(sql, new MapSqlParameterSource(paramMap), rowMapper);
}
@Override
public <T> List<T> query(String sql, RowMapper<T> rowMapper) throws DataAccessException {
return query(sql, EmptySqlParameterSource.INSTANCE, rowMapper);
}
@Override
public <T> T queryForObject(String sql, SqlParameterSource paramSource, RowMapper<T> rowMapper) throws DataAccessException {
List<T> results = getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), rowMapper);
return DataAccessUtils.requiredSingleResult(results);
}
@Override
public <T> T queryForObject(String sql, Map<String, ?> paramMap, RowMapper<T> rowMapper) throws DataAccessException {
return queryForObject(sql, new MapSqlParameterSource(paramMap), rowMapper);
}
@Override
public <T> T queryForObject(String sql, SqlParameterSource paramSource, Class<T> requiredType) throws DataAccessException {
return queryForObject(sql, paramSource, new SingleColumnRowMapper<T>(requiredType));
}
@Override
public <T> T queryForObject(String sql, Map<String, ?> paramMap, Class<T> requiredType) throws DataAccessException {
return queryForObject(sql, paramMap, new SingleColumnRowMapper<T>(requiredType));
}
@Override
public Map<String, Object> queryForMap(String sql, SqlParameterSource paramSource) throws DataAccessException {
return queryForObject(sql, paramSource, new ColumnMapRowMapper());
}
@Override
public Map<String, Object> queryForMap(String sql, Map<String, ?> paramMap) throws DataAccessException {
return queryForObject(sql, paramMap, new ColumnMapRowMapper());
}
@Override
public <T> List<T> queryForList(String sql, SqlParameterSource paramSource, Class<T> elementType) throws DataAccessException {
return query(sql, paramSource, new SingleColumnRowMapper<T>(elementType));
}
@Override
public <T> List<T> queryForList(String sql, Map<String, ?> paramMap, Class<T> elementType) throws DataAccessException {
return queryForList(sql, new MapSqlParameterSource(paramMap), elementType);
}
@Override
public List<Map<String, Object>> queryForList(String sql, SqlParameterSource paramSource) throws DataAccessException {
return query(sql, paramSource, new ColumnMapRowMapper());
}
@Override
public List<Map<String, Object>> queryForList(String sql, Map<String, ?> paramMap) throws DataAccessException {
return queryForList(sql, new MapSqlParameterSource(paramMap));
}
@Override
public SqlRowSet queryForRowSet(String sql, SqlParameterSource paramSource) throws DataAccessException {
return getJdbcOperations().query(getPreparedStatementCreator(sql, paramSource), new SqlRowSetResultSetExtractor());
}
@Override
public SqlRowSet queryForRowSet(String sql, Map<String, ?> paramMap) throws DataAccessException {
return queryForRowSet(sql, new MapSqlParameterSource(paramMap));
}
@Override
public int update(String sql, SqlParameterSource paramSource) throws DataAccessException {
return getJdbcOperations().update(getPreparedStatementCreator(sql, paramSource));
}
@Override
public int update(String sql, Map<String, ?> paramMap) throws DataAccessException {
return update(sql, new MapSqlParameterSource(paramMap));
}
@Override
public int update(String sql, SqlParameterSource paramSource, KeyHolder generatedKeyHolder) throws DataAccessException {
return update(sql, paramSource, generatedKeyHolder, null);
}
@Override
public int update(String sql, SqlParameterSource paramSource, KeyHolder generatedKeyHolder, String[] keyColumnNames) throws DataAccessException {
ParsedSql parsedSql = getParsedSql(sql);
String sqlToUse = NamedParameterUtils.substituteNamedParameters(parsedSql, paramSource);
Object[] params = NamedParameterUtils.buildValueArray(parsedSql, paramSource, null);
List<SqlParameter> declaredParameters = NamedParameterUtils.buildSqlParameterList(parsedSql, paramSource);
PreparedStatementCreatorFactory pscf = new PreparedStatementCreatorFactory(sqlToUse, declaredParameters);
if (keyColumnNames != null) {
pscf.setGeneratedKeysColumnNames(keyColumnNames);
} else {
pscf.setReturnGeneratedKeys(true);
}
return getJdbcOperations().update(pscf.newPreparedStatementCreator(params), generatedKeyHolder);
}
@Override
public int[] batchUpdate(String sql, Map<String, ?>[] batchValues) {
SqlParameterSource[] batchArgs = new SqlParameterSource[batchValues.length];
int i = 0;
for (Map<String, ?> values : batchValues) {
batchArgs[i] = new MapSqlParameterSource(values);
i++;
}
return batchUpdate(sql, batchArgs);
}
@Override
public int[] batchUpdate(String sql, SqlParameterSource[] batchArgs) {
ParsedSql parsedSql = getParsedSql(sql);
return NamedParameterBatchUpdateUtils.executeBatchUpdateWithNamedParameters(parsedSql, batchArgs, getJdbcOperations());
}
/**
* Build a PreparedStatementCreator based on the given SQL and named parameters.
* <p>Note: Not used for the {@code update} variant with generated key handling.
* @param sql SQL to execute
* @param paramSource container of arguments to bind
* @return the corresponding PreparedStatementCreator
*/
protected PreparedStatementCreator getPreparedStatementCreator(String sql, SqlParameterSource paramSource) {
ParsedSql parsedSql = getParsedSql(sql);
String sqlToUse = NamedParameterUtils.substituteNamedParameters(parsedSql, paramSource);
Object[] params = NamedParameterUtils.buildValueArray(parsedSql, paramSource, null);
List<SqlParameter> declaredParameters = NamedParameterUtils.buildSqlParameterList(parsedSql, paramSource);
PreparedStatementCreatorFactory pscf = new PreparedStatementCreatorFactory(sqlToUse, declaredParameters);
return pscf.newPreparedStatementCreator(params);
}
/**
* Obtain a parsed representation of the given SQL statement.
* <p>The default implementation uses an LRU cache with an upper limit
* of 256 entries.
* @param sql the original SQL
* @return a representation of the parsed SQL statement
*/
protected ParsedSql getParsedSql(String sql) {
if (getCacheLimit() <= 0) {
return NamedParameterUtils.parseSqlStatement(sql);
}
synchronized (this.parsedSqlCache) {
ParsedSql parsedSql = this.parsedSqlCache.get(sql);
if (parsedSql == null) {
parsedSql = NamedParameterUtils.parseSqlStatement(sql);
this.parsedSqlCache.put(sql, parsedSql);
}
return parsedSql;
}
}
}
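As the Javadoc above notes, named parameters are substituted for '?' placeholders at execution time, and a java.util.List value is expanded to the matching number of placeholders. A minimal usage sketch under the assumption that dataSource is an already configured javax.sql.DataSource; the table and column names are illustrative:
NamedParameterJdbcOperations named = new NamedParameterJdbcTemplate(dataSource);
Map<String, Object> params = new HashMap<>();
params.put("status", "ACTIVE");
params.put("ids", Arrays.asList(1, 2, 3)); // expanded to (?, ?, ?) when the SQL is parsed
Integer count = named.queryForObject(
        "select count(*) from users where status = :status and id in (:ids)",
        params, Integer.class);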
19
View Source File : NamedParameterBatchUpdateUtils.java
License : Apache License 2.0
Project Creator : langtianya
public static int[] executeBatchUpdateWithNamedParameters(final ParsedSql parsedSql, final SqlParameterSource[] batchArgs, JdbcOperations jdbcOperations) {
if (batchArgs.length <= 0) {
return new int[] { 0 };
}
String sqlToUse = NamedParameterUtils.substituteNamedParameters(parsedSql, batchArgs[0]);
return jdbcOperations.batchUpdate(sqlToUse, new BatchPreparedStatementSetter() {
@Override
public void setValues(PreparedStatement ps, int i) throws SQLException {
Object[] values = NamedParameterUtils.buildValueArray(parsedSql, batchArgs[i], null);
int[] columnTypes = NamedParameterUtils.buildSqlTypeArray(parsedSql, batchArgs[i]);
setStatementParameters(values, ps, columnTypes);
}
@Override
public int getBatchSize() {
return batchArgs.length;
}
});
}
19
View Source File : NamedParameterJdbcTemplate.java
License : GNU Lesser General Public License v3.0
Project Creator : fangjinuo
/**
* Expose the classic Spring {@link org.springframework.jdbc.core.JdbcTemplate} itself, if available,
* in particular for passing it on to other {@code JdbcTemplate} consumers.
* <p>If sufficient for the purposes at hand, {@link #getJdbcOperations()}
* is recommended over this variant.
*
* @since 3.2.5
*/
public org.springframework.jdbc.core.JdbcTemplate getJdbcTemplate() {
JdbcOperations operations = getJdbcOperations();
Preconditions.checkArgument(operations instanceof org.springframework.jdbc.core.JdbcTemplate, "No JdbcTemplate available");
return (org.springframework.jdbc.core.JdbcTemplate) operations;
}
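A small illustration of when this concrete accessor is useful: settings such as fetch size live on JdbcTemplate rather than on the JdbcOperations interface. The snippet assumes namedTemplate is an instance of the class above; prefer getJdbcOperations() whenever the interface is enough.
org.springframework.jdbc.core.JdbcTemplate template = namedTemplate.getJdbcTemplate();
template.setFetchSize(100); // concrete-class setting, shown only as an illustration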
19
View Source File : JoinTest.java
License : Apache License 2.0
Project Creator : f0rb
/**
* JoinTest
*
* @author f0rb on 2020-04-11
*/
public class JoinTest extends DemoApplicationTest {
@Resource
private JdbcOperations jdbcOperations;
@Test
void queryForJoin() {
TestJoinQuery query = new TestJoinQuery();
query.setSort("userCount,desc");
List<UserCountByRoleView> list = new JoinQueryService<>(jdbcOperations, UserCountByRoleView.class).query(query);
assertThat(list).extracting(UserCountByRoleView::getUserCount).containsExactly(3, 2);
}
@Test
void pageForJoin() {
TestJoinQuery testJoinQuery = new TestJoinQuery();
testJoinQuery.setRoleName("高级");
PageList<TestJoinView> page = new JoinQueryService<>(jdbcOperations, TestJoinView.class).page(testJoinQuery);
assertThat(page.getTotal()).isEqualTo(2);
assertThat(page.getList()).extracting(TestJoinView::getUsername).containsExactly("f0rb", "user4");
assertThat(testJoinQuery.getPageNumber()).isEqualTo(0);
assertThat(testJoinQuery.getPageSize()).isEqualTo(10);
}
/*=============== Cache ==================*/
@Test
void defaultNoCache() throws IllegalAccessException {
RoleController roleController = wac.getBean(RoleController.class);
CacheWrapper<?> entityCacheWrapper = (CacheWrapper<?>) FieldUtils.readField(roleController, "entityCacheWrapper", true);
assertThat(entityCacheWrapper.getCache()).isInstanceOf(NoOpCache.class);
}
}
19
View Source File : JoinQueryService.java
License : Apache License 2.0
Project Creator : f0rb
/**
* JoinQueryExecutor
*
* @author f0rb on 2019-06-09
*/
@AllArgsConstructor
public class JoinQueryService<E, Q extends PageQuery> {
@Autowired
private JdbcOperations jdbcOperations;
private final JoinQueryBuilder joinQueryBuilder;
private BeanPropertyRowMapper<E> beanPropertyRowMapper;
public JoinQueryService(Class<E> entityClass) {
this.joinQueryBuilder = new JoinQueryBuilder(entityClass);
this.beanPropertyRowMapper = new BeanPropertyRowMapper<>(entityClass);
}
public JoinQueryService(JdbcOperations jdbcOperations, Class<E> entityClass) {
this(entityClass);
this.jdbcOperations = jdbcOperations;
}
public PageList<E> page(Q q) {
q.forcePaging();
return new PageList<>(query(q), count(q));
}
public List<E> query(Q q) {
SqlAndArgs sqlAndArgs = buildJoinSelectAndArgs(q);
return jdbcOperations.query(sqlAndArgs.getSql(), beanPropertyRowMapper, sqlAndArgs.getArgs());
}
public long count(Q q) {
SqlAndArgs sqlAndArgs = joinQueryBuilder.buildJoinCountAndArgs(q);
return jdbcOperations.queryForObject(sqlAndArgs.getSql(), sqlAndArgs.getArgs(), Long.class);
}
public SqlAndArgs buildJoinSelectAndArgs(Q q) {
return joinQueryBuilder.buildJoinSelectAndArgs(q);
}
}
19
View Source File : StaffRepositoryImpl.java
License : Apache License 2.0
Project Creator : brainysoon
/**
* Created by brainy on 17-2-17.
*/
@Repository
public class StaffRepositoryImpl implements StaffRepository, RowMapper<Staff> {
// database
private static final String _ID = "_id";
private static final String STAFF_NAME = "staffname";
private static final String PASSWORD = "password";
private static final String BIRTHDAY = "birthday";
private static final String DATEIN = "datein";
private static final String ISSUPER = "issuper";
private JdbcOperations jdbcOperations;
@Autowired
public void setJdbcOperations(JdbcOperations jdbcOperations) {
this.jdbcOperations = jdbcOperations;
}
@Override
public Staff findStaffById(String _id) {
Staff staff = null;
try {
staff = jdbcOperations.queryForObject(QUERY_STAFF_BY_ID, new String[] { _id }, this);
} catch (Exception e) {
e.printStackTrace();
}
return staff;
}
@Override
public Staff mapRow(ResultSet rs, int rowNum) throws SQLException {
Staff staff = new Staff();
staff.set_id(rs.getString(_ID));
staff.setStaffname(rs.getString(STAFF_NAME));
staff.setPassword(rs.getString(PASSWORD));
staff.setBirthday(rs.getDate(BIRTHDAY));
staff.setDatein(rs.getDate(DATEIN));
staff.setIssuper(rs.getInt(ISSUPER));
return staff;
}
@Override
public List<Staff> queryAllStaff() {
try {
return jdbcOperations.query(QUERY_ALL_STAFF, this);
} catch (Exception ex) {
ex.printStackTrace();
}
return null;
}
@Override
public int addStaff(Staff staff) {
try {
return jdbcOperations.update(ADD_STAFF, staff.get_id(), staff.getStaffname(), staff.getPreplacedword(), staff.getBirthday(), staff.getDatein(), staff.getIssuper());
} catch (Exception ex) {
ex.printStackTrace();
}
return -1;
}
@Override
public int deleteStaffById(String _id) {
try {
return jdbcOperations.update(DELETE_STAFF_BY_ID, _id);
} catch (Exception ex) {
ex.printStackTrace();
}
return -1;
}
}
19
View Source File : StaffRepositoryImpl.java
License : Apache License 2.0
Project Creator : brainysoon
@Autowired
public void setJdbcOperations(JdbcOperations jdbcOperations) {
this.jdbcOperations = jdbcOperations;
}
19
View Source File : LogRepositoryImpl.java
License : Apache License 2.0
Project Creator : brainysoon
/**
* Created by brainy on 17-2-20.
*/
@Repository
public class LogRepositoryImpl implements LogRepository, RowMapper<Log> {
private static final String _ID = "_id";
private static final String MANAGE_ID = "manage_id";
private static final String OBJECT_ID = "object_id";
private static final String LOG_DATE = "logdate";
private static final String CONTENT = "content";
private JdbcOperations jdbcOperations;
@Autowired
public void setJdbcOperations(JdbcOperations jdbcOperations) {
this.jdbcOperations = jdbcOperations;
}
@Override
public int addLog(Log log) {
try {
return jdbcOperations.update(ADD_LOG, log.get_id(), log.getManage_id(), log.getObject_id(), log.getLogdate(), log.getContent());
} catch (Exception ex) {
ex.printStackTrace();
}
return -1;
}
@Override
public Log mapRow(ResultSet rs, int rowNum) throws SQLException {
Log log = new Log();
log.set_id(rs.getString(_ID));
log.setManage_id(rs.getString(MANAGE_ID));
log.setObject_id(rs.getString(OBJECT_ID));
log.setLogdate(rs.getTimestamp(LOG_DATE));
log.setContent(rs.getString(CONTENT));
return log;
}
}
19
View Source File : GoodsRepositoryImpl.java
License : Apache License 2.0
Project Creator : brainysoon
/**
* Created by brainy on 17-2-18.
*/
@Repository
public class GoodsRepositoryImpl implements GoodsRepository, RowMapper<Goods> {
private static final String _ID = "_id";
private static final String GOODS_NAME = "goodsname";
private static final String COUNT = "count";
private static final String GOODS_CLASS = "goodsclass";
private static final String USEFUL_LIFE = "usefullife";
private static final String DATE_PRODUCED = "dateproduced";
private static final String POSITION = "position";
private static final String PRICE = "price";
private JdbcOperations jdbcOperations;
@Autowired
public void setJdbcOperations(JdbcOperations jdbcOperations) {
this.jdbcOperations = jdbcOperations;
}
@Override
public Goods mapRow(ResultSet rs, int rowNum) throws SQLException {
Goods goods = new Goods();
goods.set_id(rs.getString(_ID));
goods.setGoodsname(rs.getString(GOODS_NAME));
goods.setCount(rs.getInt(COUNT));
goods.setGoodsclass(rs.getString(GOODS_CLASS));
goods.setUsefullife(rs.getInt(USEFUL_LIFE));
goods.setDateproduced(rs.getDate(DATE_PRODUCED));
goods.setPosition(rs.getInt(POSITION));
goods.setPrice(rs.getDouble(PRICE));
return goods;
}
@Override
public int saveGoods(Goods goods) {
return jdbcOperations.update(SAVE_GOODS, goods.get_id(), goods.getGoodsname(), goods.getCount(), goods.getGoodsclass(), goods.getPrice(), goods.getUsefullife(), goods.getDateproduced(), goods.getPosition());
}
@Override
public Goods findGoods(String _id, Integer position) {
try {
return jdbcOperations.queryForObject(FIND_GOODS, new String[] { _id, position.toString() }, this);
} catch (Exception ex) {
ex.printStackTrace();
}
return null;
}
@Override
public int pickGoods(Goods goods) {
try {
return jdbcOperations.update(PICK_GOODS, goods.getCount().toString(), goods.get_id(), goods.getPosition().toString());
} catch (Exception ex) {
ex.printStackTrace();
}
return 0;
}
@Override
public List<Goods> findGoodsByKey(String key) {
try {
// query all goods
if (key.equals("")) {
return jdbcOperations.query(FIND_GOODS_BY_NULL_KEY, this);
}
return jdbcOperations.query(FIND_GOODS_BY_KEY, new String[] { key, key, key }, this);
} catch (Exception ex) {
ex.printStackTrace();
}
return null;
}
@Override
public List<Goods> findGoodsScraped() {
try {
return jdbcOperations.query(FIND_GOODS_SHOULD_SCRAPED, this);
} catch (Exception ex) {
ex.printStackTrace();
}
return null;
}
@Override
public int deleteGoodsByIdPosition(String _id, Integer position) {
try {
return jdbcOperations.update(DELETE_GOODS_BY_ID_POSITION, _id, position.toString());
} catch (Exception ex) {
ex.printStackTrace();
}
return -1;
}
}
19
View Source File : NamedParameterBatchUpdateUtils.java
License : GNU Lesser General Public License v3.0
Project Creator : blackarbiter
public static void executeBatchUpdate(String sql, ArrayList<Object[]> batchValues, int[] columnTypes, JdbcOperations jdbcOperations) {
}
19
View Source File : SpringBatchUpdateUtils.java
License : GNU Lesser General Public License v3.0
Project Creator : blackarbiter
public class SpringBatchUpdateUtils {
JdbcOperations jdbcOperations;
public void queryBatchUpdateUnsafe(String input) {
String sql = "UPDATE Users SET name = '" + input + "' where id = 1";
BatchUpdateUtils.executeBatchUpdate(sql, new ArrayList<Object[]>(), new int[] { Types.INTEGER }, jdbcOperations);
}
public void queryBatchUpdateSafe() {
String sql = "UPDATE Users SET name = 'safe' where id = 1";
BatchUpdateUtils.executeBatchUpdate(sql, new ArrayList<Object[]>(), new int[] { Types.INTEGER }, jdbcOperations);
}
public void queryNamedParamBatchUpdateUnsafe(String input) {
String sql = "UPDATE Users SET name = '" + input + "' where id = 1";
NamedParameterBatchUpdateUtils.executeBatchUpdate(sql, new ArrayList<Object[]>(), new int[] { Types.INTEGER }, jdbcOperations);
}
public void queryNamedParameterBatchUpdateUtilsSafe() {
String sql = "UPDATE Users SET name = 'safe' where id = 1";
NamedParameterBatchUpdateUtils.executeBatchUpdate(sql, new ArrayList<Object[]>(), new int[] { Types.INTEGER }, jdbcOperations);
}
}
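For contrast with the deliberately unsafe variants above, the standard remedy is to bind the user-supplied value as a parameter rather than concatenating it into the SQL string. A minimal sketch using plain JdbcOperations (not the BatchUpdateUtils helpers shown above):
public void updateNameSafely(JdbcOperations jdbcOperations, String input) {
    // the value travels as a bound parameter, so it cannot alter the SQL structure
    jdbcOperations.update("UPDATE Users SET name = ? where id = 1", input);
}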
19
View Source File : BaseJdbcDbTest.java
License : Apache License 2.0
Project Creator : adorsys
/**
* Ensures that after each test method there is an empty {@code sts} schema;
*/
@EnableJpaPersistence
@EnableAutoConfiguration(exclude = MongoAutoConfiguration.class)
public abstract class BaseJdbcDbTest extends BaseSpringTest {
@Autowired
private Environment env;
@Autowired
private JdbcOperations jdbcOper;
@AfterEach
void destroyAndCreateEmptySchema() {
if (env.acceptsProfiles(Profiles.of("postgres"))) {
jdbcOper.update("DROP SCHEMA IF EXISTS sts CASCADE");
} else {
jdbcOper.update("DROP SCHEMA IF EXISTS sts");
}
jdbcOper.update("CREATE SCHEMA sts");
}
}
18
View Source File : DataSourceInitializerInvokerTests.java
License : Apache License 2.0
Project Creator : yuanmabiji
private void replacedertDataSourceNotInitialized(DataSource dataSource) {
JdbcOperations template = new JdbcTemplate(dataSource);
try {
template.queryForObject("SELECT COUNT(*) from BAR", Integer.clreplaced);
fail("Query should have failed as BAR table does not exist");
} catch (BadSqlGrammarException ex) {
SQLException sqlException = ex.getSQLException();
// -5501: user lacks privilege or object not found
int expectedCode = -5501;
assertThat(sqlException.getErrorCode()).isEqualTo(expectedCode);
}
}
18
View Source File : AbstractRdbmsKeyValueRepository.java
License : Apache License 2.0
Project Creator : spring-cloud
/**
* Abstract class for RDBMS based repositories.
*
* @author Glenn Renfro
* @author Ilayaperumal Gopinathan
* @author Gunnar Hillert
*/
public abstract class AbstractRdbmsKeyValueRepository<D> implements PagingAndSortingRepository<D, String> {
protected String keyColumn;
protected String valueColumn;
protected String selectClause;
protected String tableName = "%PREFIX%%SUFFIX% ";
protected final String LIST_OF_NAMES = "listnames";
protected String whereClauseByKey;
protected String inClauseByKey;
private String findAllQuery;
private String findAllWhereClauseByKey;
protected String saveRow;
private String countAll;
private String countByKey;
private String findAllWhereInClause = findAllQuery + whereClauseByKey;
private String deleteFromTableClause = "DELETE FROM " + tableName;
protected String deleteFromTableByKey = deleteFromTableClause + whereClauseByKey;
protected String tablePrefix;
protected String tableSuffix;
protected JdbcOperations jdbcTemplate;
protected NamedParameterJdbcTemplate namedParameterJdbcTemplate;
protected DataSource dataSource;
protected LinkedHashMap<String, Order> orderMap;
protected final RowMapper<D> rowMapper;
public AbstractRdbmsKeyValueRepository(DataSource dataSource, String tablePrefix, String tableSuffix, RowMapper<D> rowMapper, String keyColumn, String valueColumn) {
Assert.notNull(dataSource);
Assert.notNull(rowMapper);
this.jdbcTemplate = new JdbcTemplate(dataSource);
this.namedParameterJdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
this.dataSource = dataSource;
this.orderMap = new LinkedHashMap<>();
this.orderMap.put(keyColumn, Order.ASCENDING);
this.tablePrefix = tablePrefix;
this.tableSuffix = tableSuffix;
this.rowMapper = rowMapper;
this.keyColumn = keyColumn;
this.valueColumn = valueColumn;
tableName = updatePrefixSuffix("%PREFIX%%SUFFIX% ");
selectClause = keyColumn + ", " + valueColumn + " ";
whereClauseByKey = "where " + keyColumn + " = ? ";
inClauseByKey = "where " + keyColumn + " in ( :" + LIST_OF_NAMES + ") ";
findAllQuery = "SELECT " + selectClause + "FROM " + tableName;
findAllWhereClauseByKey = findAllQuery + whereClauseByKey;
saveRow = "INSERT into " + tableName + "(" + keyColumn + ", " + valueColumn + ")" + "values (?, ?)";
countAll = "SELECT COUNT(*) FROM " + tableName;
countByKey = "SELECT COUNT(*) FROM " + tableName + whereClauseByKey;
findAllWhereInClause = findAllQuery + inClauseByKey;
deleteFromTableClause = "DELETE FROM " + tableName;
deleteFromTableByKey = deleteFromTableClause + whereClauseByKey;
}
@Override
public Iterable<D> findAll(Sort sort) {
Assert.notNull(sort, "sort must not be null");
Iterator<Sort.Order> iter = sort.iterator();
String query = findAllQuery + "ORDER BY ";
while (iter.hasNext()) {
Sort.Order order = iter.next();
query = query + order.getProperty() + " " + order.getDirection();
if (iter.hasNext()) {
query = query + ", ";
}
}
return jdbcTemplate.query(query, rowMapper);
}
public Page<D> search(SearchPageable searchPageable) {
Assert.notNull(searchPageable, "searchPageable must not be null.");
final StringBuilder whereClause = new StringBuilder("WHERE ");
final List<String> params = new ArrayList<>();
final Iterator<String> columnIterator = searchPageable.getColumns().iterator();
while (columnIterator.hasNext()) {
whereClause.append("lower(" + columnIterator.next()).append(") like ").append("lower(?)");
params.add("%" + searchPageable.getSearchQuery() + "%");
if (columnIterator.hasNext()) {
whereClause.append(" OR ");
}
}
return queryForPageableResults(searchPageable.getPageable(), selectClause, tableName, whereClause.toString(), params.toArray(), count());
}
@Override
public Page<D> findAll(Pageable pageable) {
Assert.notNull(pageable, "pageable must not be null");
return queryForPageableResults(pageable, selectClause, tableName, null, new Object[] {}, count());
}
@Override
public <S extends D> Iterable<S> save(Iterable<S> iterableDefinitions) {
Assert.notNull(iterableDefinitions, "iterableDefinitions must not be null");
for (S definition : iterableDefinitions) {
save(definition);
}
return iterableDefinitions;
}
@Override
public D findOne(String name) {
Assert.hasText(name, "name must not be empty nor null");
try {
return jdbcTemplate.queryForObject(findAllWhereClauseByKey, rowMapper, name);
} catch (EmptyResultDataAccessException e) {
return null;
}
}
@Override
public boolean exists(String name) {
Assert.hasText(name, "name must not be empty nor null");
boolean result;
try {
result = (jdbcTemplate.queryForObject(countByKey, new Object[] { name }, Long.class) > 0) ? true : false;
} catch (EmptyResultDataAccessException e) {
result = false;
}
return result;
}
@Override
public Iterable<D> findAll() {
return jdbcTemplate.query(findAllQuery, rowMapper);
}
@Override
public Iterable<D> findAll(Iterable<String> names) {
Assert.notNull(names, "names must not be null");
List<String> listOfNames = new ArrayList<String>();
for (String name : names) {
listOfNames.add(name);
}
MapSqlParameterSource namedParameters = new MapSqlParameterSource();
namedParameters.addValue(LIST_OF_NAMES, listOfNames);
return namedParameterJdbcTemplate.query(findAllWhereInClause, namedParameters, rowMapper);
}
@Override
public long count() {
try {
return jdbcTemplate.queryForObject(countAll, new Object[] {}, Long.class);
} catch (EmptyResultDataAccessException e) {
return 0;
}
}
@Override
public void delete(String name) {
Assert.hasText(name, "name must not be empty nor null");
jdbcTemplate.update(deleteFromTableByKey, name);
}
@Override
public void delete(Iterable<? extends D> definitions) {
Assert.notNull(definitions, "definitions must not be null");
for (D definition : definitions) {
delete(definition);
}
}
@Override
public void deleteAll() {
jdbcTemplate.update(deleteFromTableClause);
}
private String updatePrefixSuffix(String base) {
String updatedPrefix = StringUtils.replace(base, "%PREFIX%", tablePrefix);
return StringUtils.replace(updatedPrefix, "%SUFFIX%", tableSuffix);
}
private Page<D> queryForPageableResults(Pageable pageable, String selectClause, String tableName, String whereClause, Object[] queryParam, long totalCount) {
// FIXME Possible performance improvement refactoring so factory isn't called every time.
SqlPagingQueryProviderFactoryBean factoryBean = new SqlPagingQueryProviderFactoryBean();
factoryBean.setSelectClause(selectClause);
factoryBean.setFromClause(tableName);
if (StringUtils.hasText(whereClause)) {
factoryBean.setWhereClause(whereClause);
}
final Sort sort = pageable.getSort();
final LinkedHashMap<String, Order> sortOrderMap = new LinkedHashMap<>();
if (sort != null) {
for (Sort.Order sortOrder : sort) {
sortOrderMap.put(sortOrder.getProperty(), sortOrder.isAscending() ? Order.ASCENDING : Order.DESCENDING);
}
}
if (!CollectionUtils.isEmpty(sortOrderMap)) {
factoryBean.setSortKeys(sortOrderMap);
} else {
factoryBean.setSortKeys(this.orderMap);
}
factoryBean.setDataSource(this.dataSource);
PagingQueryProvider pagingQueryProvider;
try {
pagingQueryProvider = factoryBean.getObject();
pagingQueryProvider.init(this.dataSource);
} catch (Exception e) {
throw new IllegalStateException(e);
}
String query = pagingQueryProvider.getPageQuery(pageable);
List<D> resultList = jdbcTemplate.query(query, queryParam, rowMapper);
return new PageImpl<>(resultList, pageable, totalCount);
}
}
18
View Source File : NameArrayIntegrationTests.java
License : Apache License 2.0
Project Creator : pgjdbc
@Test
void shouldReadArrayOfName() {
JdbcOperations jdbcOperations = SERVER.getJdbcOperations();
jdbcOperations.execute("DROP TABLE IF EXISTS name_table;");
jdbcOperations.execute("CREATE TABLE name_table (n name)");
this.connection.createStatement("INSERT INTO name_table (n) VALUES('hello'), ('world'), (null)").execute().flatMap(PostgresqlResult::getRowsUpdated).as(StepVerifier::create).expectNextCount(1).verifyComplete();
this.connection.createStatement("select array_agg(n) as names from name_table").execute().flatMap(it -> it.map((row, rowMetadata) -> row.get("names"))).as(StepVerifier::create).replacedertNext(arr -> {
replacedertThat((Object[]) arr).containsExactly(new Object[] { "hello", "world", null });
}).verifyComplete();
}
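For comparison with the reactive assertions above, here is a rough sketch of reading the same aggregated name[] column through a blocking JdbcOperations; it is not part of the pgjdbc test suite, and the empty-array fallbacks are illustrative assumptions.
import java.sql.Array;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.ResultSetExtractor;
public class NameArrayJdbcSketch {
    static String[] readNames(JdbcOperations jdbcOperations) {
        ResultSetExtractor<String[]> extractor = rs -> {
            if (!rs.next()) {
                return new String[0];
            }
            Array names = rs.getArray("names");
            // For name/text columns the PostgreSQL JDBC driver materializes the value as a String[].
            return names == null ? new String[0] : (String[]) names.getArray();
        };
        return jdbcOperations.query("select array_agg(n) as names from name_table", extractor);
    }
}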
18
View Source File : JsonIntegrationTests.java
License : Apache License 2.0
Project Creator : pgjdbc
@Test
void shouldReadAndWriteJson() {
JdbcOperations jdbcOperations = SERVER.getJdbcOperations();
jdbcOperations.execute("DROP TABLE IF EXISTS my_table;");
jdbcOperations.execute("CREATE TABLE my_table (my_json JSON)");
PostgresqlConnection connection = this.connectionFactory.create().block();
connection.createStatement("INSERT INTO my_table (my_json) VALUES($1)").bind("$1", Json.of("{\"hello\": \"world\"}")).execute().flatMap(PostgresqlResult::getRowsUpdated).as(StepVerifier::create).expectNextCount(1).verifyComplete();
connection.createStatement("SELECT my_json FROM my_table").execute().flatMap(it -> it.map((row, rowMetadata) -> row.get("my_json", Json.clreplaced))).map(Json::replacedtring).as(StepVerifier::create).expectNext("{\"hello\": \"world\"}").verifyComplete();
connection.close().block();
}
18
View Source File : JsonIntegrationTests.java
License : Apache License 2.0
Project Creator : pgjdbc
@Test
void shouldReadAndWriteAsString() {
JdbcOperations jdbcOperations = SERVER.getJdbcOperations();
jdbcOperations.execute("DROP TABLE IF EXISTS my_table;");
jdbcOperations.execute("CREATE TABLE my_table (my_json JSON)");
PostgresqlConnection connection = this.connectionFactory.create().block();
connection.createStatement("INSERT INTO my_table (my_json) VALUES($1::JSON)").bind("$1", "{\"hello\": \"world\"}").execute().flatMap(PostgresqlResult::getRowsUpdated).as(StepVerifier::create).expectNextCount(1).verifyComplete();
connection.createStatement("SELECT my_json FROM my_table").execute().flatMap(it -> it.map((row, rowMetadata) -> row.get("my_json", String.clreplaced))).as(StepVerifier::create).expectNext("{\"hello\": \"world\"}").verifyComplete();
connection.close().block();
}
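The two JSON tests above write through R2DBC. The sketch below assumes the same my_table schema and shows a plain-JDBC round trip via JdbcOperations, with a ::JSON cast on insert and a ::text cast on read so no driver-specific JSON type is needed; it is an illustration, not code from the pgjdbc project.
import org.springframework.jdbc.core.JdbcOperations;
public class JsonJdbcSketch {
    static String roundTrip(JdbcOperations jdbcOperations) {
        jdbcOperations.execute("DROP TABLE IF EXISTS my_table;");
        jdbcOperations.execute("CREATE TABLE my_table (my_json JSON)");
        // The ?::JSON cast mirrors the bind in the R2DBC test; the value travels as plain text.
        jdbcOperations.update("INSERT INTO my_table (my_json) VALUES (?::JSON)", "{\"hello\": \"world\"}");
        // Reading it back as text avoids any JSON-specific type handling on the JDBC side.
        return jdbcOperations.queryForObject("SELECT my_json::text FROM my_table", String.class);
    }
}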
18
View Source File : DefaultCalendarService.java
License : MIT License
Project Creator : PacktPublishing
/**
* A default implementation of {@link CalendarService} that delegates to {@link EventDao} and {@link CalendarUserDao}.
*
* @author Rob Winch
* @author Mick Knutson
*/
@Repository
public class DefaultCalendarService implements CalendarService {
private final EventDao eventDao;
private final CalendarUserDao userDao;
private final JdbcOperations jdbcOperations;
private final PasswordEncoder passwordEncoder;
@Autowired
public DefaultCalendarService(final EventDao eventDao, final CalendarUserDao userDao, final JdbcOperations jdbcOperations, final PasswordEncoder passwordEncoder) {
if (eventDao == null) {
throw new IllegalArgumentException("eventDao cannot be null");
}
if (userDao == null) {
throw new IllegalArgumentException("userDao cannot be null");
}
if (jdbcOperations == null) {
throw new IllegalArgumentException("jdbcOperations cannot be null");
}
if (passwordEncoder == null) {
throw new IllegalArgumentException("passwordEncoder cannot be null");
}
this.eventDao = eventDao;
this.userDao = userDao;
this.jdbcOperations = jdbcOperations;
this.passwordEncoder = passwordEncoder;
}
@Override
public Event getEvent(int eventId) {
return eventDao.getEvent(eventId);
}
@Override
public int createEvent(Event event) {
return eventDao.createEvent(event);
}
@Override
public List<Event> findForUser(int userId) {
return eventDao.findForUser(userId);
}
@Override
public List<Event> getEvents() {
return eventDao.getEvents();
}
@Override
public CalendarUser getUser(int id) {
return userDao.getUser(id);
}
@Override
public CalendarUser findUserByEmail(String email) {
return userDao.findUserByEmail(email);
}
@Override
public List<CalendarUser> findUsersByEmail(String partialEmail) {
return userDao.findUsersByEmail(partialEmail);
}
@Override
public int createUser(CalendarUser user) {
String encodedPassword = passwordEncoder.encode(user.getPassword());
user.setPassword(encodedPassword);
int userId = userDao.createUser(user);
jdbcOperations.update("insert into calendar_user_authorities(calendar_user,authority) values (?,?)", userId, "ROLE_USER");
return userId;
}
}
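DefaultCalendarService depends only on interfaces, so wiring it is a matter of providing the collaborators as beans. A hedged configuration sketch follows; the EventDao and CalendarUserDao beans are assumed to come from elsewhere (for example component scanning), and the delegating password encoder is one plausible choice rather than the one used by the book's samples.
import javax.sql.DataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.security.crypto.factory.PasswordEncoderFactories;
import org.springframework.security.crypto.password.PasswordEncoder;
@Configuration
public class CalendarServiceConfig {
    @Bean
    public JdbcOperations jdbcOperations(DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
    @Bean
    public PasswordEncoder passwordEncoder() {
        // The delegating encoder prefixes each hash with its algorithm id, e.g. {bcrypt}.
        return PasswordEncoderFactories.createDelegatingPasswordEncoder();
    }
}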
18
View Source File : DefaultCalendarService.java
License : MIT License
Project Creator : PacktPublishing
/**
* A default implementation of {@link CalendarService} that delegates to {@link EventDao} and {@link CalendarUserDao}.
*
* @author Rob Winch
* @author Mick Knutson
*/
@Repository
public class DefaultCalendarService implements CalendarService {
private final EventDao eventDao;
private final CalendarUserDao userDao;
private final JdbcOperations jdbcOperations;
private final PasswordEncoder passwordEncoder;
@Autowired
public DefaultCalendarService(final EventDao eventDao, final CalendarUserDao userDao, final JdbcOperations jdbcOperations, final PasswordEncoder passwordEncoder) {
if (eventDao == null) {
throw new IllegalArgumentException("eventDao cannot be null");
}
if (userDao == null) {
throw new IllegalArgumentException("userDao cannot be null");
}
if (jdbcOperations == null) {
throw new IllegalArgumentException("jdbcOperations cannot be null");
}
if (passwordEncoder == null) {
throw new IllegalArgumentException("passwordEncoder cannot be null");
}
this.eventDao = eventDao;
this.userDao = userDao;
this.jdbcOperations = jdbcOperations;
this.passwordEncoder = passwordEncoder;
}
public Event getEvent(int eventId) {
return eventDao.getEvent(eventId);
}
public int createEvent(Event event) {
return eventDao.createEvent(event);
}
public List<Event> findForUser(int userId) {
return eventDao.findForUser(userId);
}
public List<Event> getEvents() {
return eventDao.getEvents();
}
public CalendarUser getUser(int id) {
return userDao.getUser(id);
}
public CalendarUser findUserByEmail(String email) {
return userDao.findUserByEmail(email);
}
public List<CalendarUser> findUsersByEmail(String partialEmail) {
return userDao.findUsersByEmail(partialEmail);
}
public int createUser(CalendarUser user) {
String encodedPassword = passwordEncoder.encode(user.getPassword());
user.setPassword(encodedPassword);
int userId = userDao.createUser(user);
jdbcOperations.update("insert into calendar_user_authorities(calendar_user,authority) values (?,?)", userId, "ROLE_USER");
return userId;
}
}
18
View Source File : DefaultCalendarService.java
License : MIT License
Project Creator : PacktPublishing
/**
* A default implementation of {@link CalendarService} that delegates to {@link EventDao} and {@link CalendarUserDao}.
*
* @author Rob Winch
* @author Mick Knutson
*/
@Repository
public class DefaultCalendarService implements CalendarService {
private final EventDao eventDao;
private final CalendarUserDao userDao;
private final JdbcOperations jdbcOperations;
private final PasswordEncoder passwordEncoder;
@Autowired
public DefaultCalendarService(final EventDao eventDao, final CalendarUserDao userDao, final JdbcOperations jdbcOperations, final PasswordEncoder passwordEncoder) {
if (eventDao == null) {
throw new IllegalArgumentException("eventDao cannot be null");
}
if (userDao == null) {
throw new IllegalArgumentException("userDao cannot be null");
}
if (jdbcOperations == null) {
throw new IllegalArgumentException("jdbcOperations cannot be null");
}
if (passwordEncoder == null) {
throw new IllegalArgumentException("passwordEncoder cannot be null");
}
this.eventDao = eventDao;
this.userDao = userDao;
this.jdbcOperations = jdbcOperations;
this.passwordEncoder = passwordEncoder;
}
public Event getEvent(int eventId) {
return eventDao.getEvent(eventId);
}
public int createEvent(Event event) {
return eventDao.createEvent(event);
}
public List<Event> findForUser(int userId) {
return eventDao.findForUser(userId);
}
public List<Event> getEvents() {
return eventDao.getEvents();
}
public CalendarUser getUser(int id) {
return userDao.getUser(id);
}
public CalendarUser findUserByEmail(String email) {
return userDao.findUserByEmail(email);
}
public List<CalendarUser> findUsersByEmail(String partialEmail) {
return userDao.findUsersByEmail(partialEmail);
}
public int createUser(CalendarUser user) {
String encodedPassword = passwordEncoder.encodePassword(user.getPassword(), null);
user.setPassword(encodedPassword);
int userId = userDao.createUser(user);
jdbcOperations.update("insert into calendar_user_authorities(calendar_user,authority) values (?,?)", userId, "ROLE_USER");
return userId;
}
}
18
View Source File : DefaultCalendarService.java
License : MIT License
Project Creator : PacktPublishing
/**
* A default implementation of {@link CalendarService} that delegates to {@link EventDao} and {@link CalendarUserDao}.
*
* @author Rob Winch
* @author Mick Knutson
*/
@Repository
public class DefaultCalendarService implements CalendarService {
private final EventDao eventDao;
private final CalendarUserDao userDao;
private final JdbcOperations jdbcOperations;
@Autowired
public DefaultCalendarService(final EventDao eventDao, final CalendarUserDao userDao, final JdbcOperations jdbcOperations) {
if (eventDao == null) {
throw new IllegalArgumentException("eventDao cannot be null");
}
if (userDao == null) {
throw new IllegalArgumentException("userDao cannot be null");
}
if (jdbcOperations == null) {
throw new IllegalArgumentException("jdbcOperations cannot be null");
}
this.eventDao = eventDao;
this.userDao = userDao;
this.jdbcOperations = jdbcOperations;
}
public Event getEvent(int eventId) {
return eventDao.getEvent(eventId);
}
public int createEvent(Event event) {
return eventDao.createEvent(event);
}
public List<Event> findForUser(int userId) {
return eventDao.findForUser(userId);
}
public List<Event> getEvents() {
return eventDao.getEvents();
}
public CalendarUser getUser(int id) {
return userDao.getUser(id);
}
public CalendarUser findUserByEmail(String email) {
return userDao.findUserByEmail(email);
}
public List<CalendarUser> findUsersByEmail(String partialEmail) {
return userDao.findUsersByEmail(partialEmail);
}
public int createUser(CalendarUser user) {
int userId = userDao.createUser(user);
jdbcOperations.update("insert into calendar_user_authorities(calendar_user,authority) values (?,?)", userId, "ROLE_USER");
return userId;
}
}
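Because this last variant takes only three constructor arguments, its createUser path is easy to unit test with mocks. The sketch below is illustrative only: it assumes Mockito and JUnit 5 on the classpath, stubs the DAOs from the snippet, and verifies that the ROLE_USER authority insert goes through JdbcOperations.
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.Test;
import org.springframework.jdbc.core.JdbcOperations;
class DefaultCalendarServiceSketchTest {
    @Test
    void createUserInsertsRoleUserAuthority() {
        EventDao eventDao = mock(EventDao.class);
        CalendarUserDao userDao = mock(CalendarUserDao.class);
        JdbcOperations jdbcOperations = mock(JdbcOperations.class);
        CalendarUser user = mock(CalendarUser.class);
        when(userDao.createUser(user)).thenReturn(42);
        DefaultCalendarService service = new DefaultCalendarService(eventDao, userDao, jdbcOperations);
        int userId = service.createUser(user);
        assertEquals(42, userId);
        // The authority row is written straight through JdbcOperations, not through a DAO.
        verify(jdbcOperations).update(anyString(), eq(42), eq("ROLE_USER"));
    }
}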
18
View Source File : JdbcHashRepository.java
License : Apache License 2.0
Project Creator : naver
/**
* HashRepository implementation that stores hash values via JDBC.
* Supported databases: H2, MySQL
*
* @author yongkyu.lee
* @since 0.1
*/
public class JdbcHashRepository implements HashRepository {
private static final Logger log = LoggerFactory.getLogger(JdbcHashRepository.class);
private static final String selectSql = "SELECT item_hash FROM BATCHEX_ITEM_HASH WHERE item_key = ? AND expiry > ?";
private static final String saveSqlForH2 = "MERGE INTO BATCHEX_ITEM_HASH (item_key, item_hash, expiry) KEY (item_key) VALUES (?, ?, ?)";
private static final String saveSqlForMysql = "INSERT INTO BATCHEX_ITEM_HASH (item_key, item_hash, expiry) VALUES (?, ?, ?) ON DUPLICATE KEY UPDATE item_hash = VALUES(item_hash), expiry = VALUES(expiry)";
private static final String deleteExpiredSql = "DELETE FROM BATCHEX_ITEM_HASH WHERE expiry < ? LIMIT 100";
private final JdbcOperations jdbcOperations;
private final TransactionOperations transactionOperations;
private final DatabaseType databaseType;
/**
* @param dataSource {@link DataSource}
* @param platformTransactionManager {@link PlatformTransactionManager}
* @throws MetaDataAccessException {@link UnsupportedDatabaseException}
*/
public JdbcHashRepository(DataSource dataSource, PlatformTransactionManager platformTransactionManager) throws MetaDataAccessException {
this.transactionOperations = new TransactionTemplate(platformTransactionManager);
this.jdbcOperations = new JdbcTemplate(dataSource);
this.databaseType = DatabaseType.fromMetaData(dataSource);
if (this.databaseType != DatabaseType.MYSQL && this.databaseType != DatabaseType.H2) {
throw new UnsupportedDatabaseException("'" + this.databaseType + "' is not supported");
}
}
@Override
public String getHashValue(String itemKey) {
String hashValue = null;
SqlRowSet rowSet = jdbcOperations.queryForRowSet(selectSql, itemKey, new Date());
if (rowSet.next()) {
hashValue = rowSet.getString("item_hash");
}
return hashValue;
}
@Override
public void saveItemHashes(List<ItemHash> itemHashes) {
final String sql = getSaveSql();
final List<Object[]> args = itemHashes.stream().map(p -> new Object[] { p.getItemKey(), p.getItemHash(), p.getExpiry() }).collect(Collectors.toList());
if (log.isDebugEnabled()) {
log.debug("Save ItemHashes {}", itemHashes.size());
}
transactionOperations.execute(new TransactionCallbackWithoutResult() {
@Override
protected void doInTransactionWithoutResult(TransactionStatus status) {
jdbcOperations.batchUpdate(sql, args);
jdbcOperations.update(deleteExpiredSql, new Date());
}
});
}
private String getSaveSql() {
if (this.databaseType == DatabaseType.H2) {
return saveSqlForH2;
} else {
return saveSqlForMysql;
}
}
}
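The heart of saveItemHashes() is a batched upsert followed by an opportunistic purge, both inside one transaction. The sketch below reproduces that pattern with the MySQL statements from the snippet, but builds its own JdbcTemplate and TransactionTemplate and takes pre-built Object[] rows, so it is a simplified stand-in rather than the repository's actual code.
import java.util.Date;
import java.util.List;
import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
public class HashUpsertSketch {
    private static final String UPSERT = "INSERT INTO BATCHEX_ITEM_HASH (item_key, item_hash, expiry) VALUES (?, ?, ?) ON DUPLICATE KEY UPDATE item_hash = VALUES(item_hash), expiry = VALUES(expiry)";
    private static final String DELETE_EXPIRED = "DELETE FROM BATCHEX_ITEM_HASH WHERE expiry < ? LIMIT 100";
    // Each row is { itemKey, itemHash, expiry }, matching the parameter order of the upsert.
    public static void save(DataSource dataSource, List<Object[]> rows) {
        JdbcOperations jdbc = new JdbcTemplate(dataSource);
        TransactionTemplate tx = new TransactionTemplate(new DataSourceTransactionManager(dataSource));
        tx.executeWithoutResult(status -> {
            jdbc.batchUpdate(UPSERT, rows);          // write all hashes in one batch
            jdbc.update(DELETE_EXPIRED, new Date()); // opportunistically purge expired rows
        });
    }
}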
18
View Source File : Fix102AddressBookMigrationTest.java
License : Apache License 2.0
Project Creator : hashgraph
@Tag("migration")
@Tag("v1")
class Fix102AddressBookMigrationTest extends IntegrationTest {
@Resource
private AddressBookRepository addressBookRepository;
@Resource
private JdbcOperations jdbcOperations;
@Value("clreplacedpath:db/migration/v1/V1.31.0__fix_address_book_102.sql")
private File sql;
@Test
void latest102WithNonNullTimestamp() throws IOException {
AddressBook addressBook1 = addressBook(102, 1, 100L);
AddressBook addressBook2 = addressBook(101, 101, null);
AddressBook addressBook3 = addressBook(101, 201, null);
runMigration();
assertEndTimestamp(addressBook1, null);
assertEndTimestamp(addressBook2, 200L);
assertEndTimestamp(addressBook3, null);
}
@Test
void previous102WithIncorrectTimestamp() throws IOException {
AddressBook addressBook1 = addressBook(102, 1, 200L);
AddressBook addressBook2 = addressBook(102, 101, 300L);
AddressBook addressBook3 = addressBook(101, 201, null);
AddressBook addressBook4 = addressBook(101, 301, null);
runMigration();
assertEndTimestamp(addressBook1, 100L);
assertEndTimestamp(addressBook2, null);
assertEndTimestamp(addressBook3, 300L);
assertEndTimestamp(addressBook4, null);
}
@Test
void noChanges() throws IOException {
AddressBook addressBook1 = addressBook(102, 1, 100L);
AddressBook addressBook2 = addressBook(102, 101, null);
AddressBook addressBook3 = addressBook(101, 201, 300L);
AddressBook addressBook4 = addressBook(101, 301, null);
runMigration();
assertEndTimestamp(addressBook1, 100L);
assertEndTimestamp(addressBook2, null);
assertEndTimestamp(addressBook3, 300L);
assertEndTimestamp(addressBook4, null);
}
private AddressBook addressBook(long fileId, long startConsensusTimestamp, Long endConsensusTimestamp) {
AddressBook addressBook = new AddressBook();
addressBook.setEndConsensusTimestamp(endConsensusTimestamp);
addressBook.setFileData(new byte[] {});
addressBook.setFileId(EntityId.of(0, 0, fileId, EntityTypeEnum.FILE));
addressBook.setStartConsensusTimestamp(startConsensusTimestamp);
return addressBookRepository.save(addressBook);
}
private void runMigration() throws IOException {
jdbcOperations.update(FileUtils.readFileToString(sql, "UTF-8"));
}
private void assertEndTimestamp(AddressBook addressBook, Long endConsensusTimestamp) {
assertThat(addressBookRepository.findById(addressBook.getStartConsensusTimestamp())).get().extracting(AddressBook::getEndConsensusTimestamp).isEqualTo(endConsensusTimestamp);
}
}
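runMigration() above pushes the whole SQL file through a single JdbcOperations.update() call, which works when the driver accepts multi-statement strings. An alternative, shown as a sketch below, is to let Spring's ResourceDatabasePopulator split and execute the script statement by statement; the file is the same one injected via @Value.
import java.io.File;
import javax.sql.DataSource;
import org.springframework.core.io.FileSystemResource;
import org.springframework.jdbc.datasource.init.ResourceDatabasePopulator;
public class MigrationScriptSketch {
    static void runMigration(DataSource dataSource, File sql) {
        // Splits the script into individual statements and executes them in order.
        ResourceDatabasePopulator populator = new ResourceDatabasePopulator(new FileSystemResource(sql));
        populator.execute(dataSource);
    }
}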
18
View Source File : AssociativeServiceTemplate.java
License : Apache License 2.0
Project Creator : f0rb
/**
* AssociativeServiceTemplate
*
* @author f0rb on 2019-05-30
*/
public class AssociativeServiceTemplate<L, R> implements AssociativeService<L, R> {
private final AssociativeSqlBuilder sqlBuilder;
private final SingleColumnRowMapper<L> leftRowMapper = new SingleColumnRowMapper<>();
private final SingleColumnRowMapper<R> rightRowMapper = new SingleColumnRowMapper<>();
@Autowired
private JdbcOperations jdbcOperations;
@Autowired(required = false)
private UserIdProvider<?> userIdProvider;
public AssociativeServiceTemplate(String table, String left, String right) {
this(table, left, right, null);
}
public AssociativeServiceTemplate(String table, String left, String right, String createUserColumn) {
this.sqlBuilder = new AssociativeSqlBuilder(table, left, right, createUserColumn);
}
@Override
public boolean exists(Collection<L> leftIds, Collection<R> rightIds) {
return count(leftIds, rightIds) > 0;
}
@Override
public long count(Collection<L> leftIds, Collection<R> rightIds) {
return count(leftIds.toArray(), rightIds.toArray());
}
private Long count(Object[] leftIds, Object[] rightIds) {
if (leftIds.length == 0 || rightIds.length == 0) {
return 0L;
}
SqlAndArgs sqlAndArgs = sqlBuilder.buildCount(leftIds, rightIds);
return jdbcOperations.queryForObject(sqlAndArgs.getSql(), sqlAndArgs.getArgs(), Long.class);
}
@Override
public List<R> getByLeftId(L leftId) {
return jdbcOperations.query(sqlBuilder.getByLeftId, rightRowMapper, leftId);
}
@Override
public int deleteByLeftId(L leftId) {
return jdbcOperations.update(sqlBuilder.deleteByLeftId, leftId);
}
@Override
public List<L> getByRightId(R rightId) {
return jdbcOperations.query(sqlBuilder.getByRightId, leftRowMapper, rightId);
}
@Override
public int deleteByRightId(R rightId) {
return jdbcOperations.update(sqlBuilder.deleteByRightId, rightId);
}
@Override
public int deallocate(Collection<L> leftIds, Collection<R> rightIds) {
SqlAndArgs sqlAndArgs = sqlBuilder.buildDeallocate(leftIds.toArray(), rightIds.toArray());
return jdbcOperations.update(sqlAndArgs.getSql(), sqlAndArgs.getArgs());
}
@Override
@Transactional
public int reallocateForLeft(L leftId, Collection<R> rightIds) {
deleteByLeftId(leftId);
if (rightIds.isEmpty()) {
return 0;
}
return allocate(singleton(leftId), rightIds);
}
@Override
@Transactional
public int reallocateForRight(R rightId, Collection<L> leftIds) {
deleteByRightId(rightId);
if (leftIds.isEmpty()) {
return 0;
}
return allocate(leftIds, singleton(rightId));
}
@Override
public int allocate(Collection<L> leftIds, Collection<R> rightIds) {
SqlAndArgs sqlAndArgs = sqlBuilder.buildAllocate(leftIds, rightIds, (Long) (userIdProvider == null ? null : userIdProvider.getUserId()));
return jdbcOperations.update(sqlAndArgs.getSql(), sqlAndArgs.getArgs());
}
}
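The leftRowMapper and rightRowMapper fields above use SingleColumnRowMapper to read one column per row straight into the target type. The sketch below shows that technique against a hypothetical user_and_role(user_id, role_id) junction table; the table and column names are assumptions, not the ones generated by AssociativeSqlBuilder.
import java.util.List;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.SingleColumnRowMapper;
public class JunctionQuerySketch {
    private final SingleColumnRowMapper<Integer> roleIdMapper = new SingleColumnRowMapper<>(Integer.class);
    private final JdbcOperations jdbcOperations;
    public JunctionQuerySketch(JdbcOperations jdbcOperations) {
        this.jdbcOperations = jdbcOperations;
    }
    public List<Integer> getRoleIdsByUserId(int userId) {
        // Each result row has a single column, which the mapper converts to the requested type.
        return jdbcOperations.query("SELECT role_id FROM user_and_role WHERE user_id = ?", roleIdMapper, userId);
    }
}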
18
View Source File : AbstractDynamicService.java
License : Apache License 2.0
Project Creator : f0rb
@Autowired
@SuppressWarnings("unchecked")
public void setJdbcOperations(JdbcOperations jdbcOperations) {
dataAccess = new JdbcDataAccess<>(jdbcOperations, entityClass, (Class<I>) BeanUtil.getActualTypeArguments(getClass())[1], getRowMapper());
}
18
View Source File : AbstractHitting.java
License : MIT License
Project Creator : aoju
/**
* @author Kimi Liu
* @version 6.2.1
* @since JDK 1.8+
*/
public abstract class AbstractHitting implements Hitting {
private static final ExecutorService executor = Executors.newSingleThreadExecutor(r -> {
Thread thread = new Thread(r);
thread.setName("cache:db-writer");
thread.setDaemon(true);
return thread;
});
private static final Lock lock = new ReentrantLock();
private volatile boolean isShutdown = false;
private BlockingQueue<CachePair<String, Integer>> hitQueue = new LinkedTransferQueue<>();
private BlockingQueue<CachePair<String, Integer>> requireQueue = new LinkedTransferQueue<>();
private JdbcOperations jdbcOperations;
private Properties sqls;
protected AbstractHitting(Map<String, Object> context) {
InputStream resource = this.getClass().getClassLoader().getResourceAsStream(Normal.META_DATA_INF + "/caches/bus-cache.yaml");
this.sqls = new Yaml().loadAs(resource, Properties.class);
this.jdbcOperations = jdbcOperationsSupplier(context).get();
executor.submit(() -> {
while (!isShutdown) {
dumpToDB(hitQueue, "hit_count");
dumpToDB(requireQueue, "require_count");
}
});
}
public AbstractHitting(String url, String username, String password) {
this(newHashMap("url", url, "username", username, "password", password));
}
public static Map<String, Object> newHashMap(Object... keyValues) {
Map<String, Object> map = new HashMap<>(keyValues.length / 2);
for (int i = 0; i < keyValues.length; i += 2) {
String key = (String) keyValues[i];
Object value = keyValues[i + 1];
map.put(key, value);
}
return map;
}
/**
* 1. create the JdbcOperations
* 2. initialize the DB (e.g. load SQL scripts, create and initialize tables)
*
* @param context other parameters passed in from the constructor
* @return the initialized JdbcOperations object
*/
protected abstract Supplier<JdbcOperations> jdbcOperationsSupplier(Map<String, Object> context);
/**
* Convert the DB map results to a stream of DataDO.
*
* @param map results of the DB query
* @return a stream of DataDO objects
*/
protected abstract Stream<DataDO> transferResults(List<Map<String, Object>> map);
private void dumpToDB(BlockingQueue<CachePair<String, Integer>> queue, String column) {
long times = 0;
CachePair<String, Integer> head;
// drain the queue (at most ~100 entries per pass) into a map
Map<String, AtomicLong> holdMap = new HashMap<>();
while ((head = queue.poll()) != null && times <= 100) {
holdMap.computeIfAbsent(head.getLeft(), (key) -> new AtomicLong(0L)).addAndGet(head.getRight());
++times;
}
// batch write to DB
holdMap.forEach((pattern, count) -> countAddCas(column, pattern, count.get()));
}
@Override
public void hitIncr(String pattern, int count) {
if (count != 0)
hitQueue.add(CachePair.of(pattern, count));
}
@Override
public void reqIncr(String pattern, int count) {
if (count != 0)
requireQueue.add(CachePair.of(pattern, count));
}
@Override
public Map<String, Hitting.HittingDO> getHitting() {
List<DataDO> dataDOS = queryAll();
AtomicLong statisticsHit = new AtomicLong(0);
AtomicLong statisticsRequired = new AtomicLong(0);
// gather each pattern's hit rate
Map<String, Hitting.HittingDO> result = dataDOS.stream().collect(Collectors.toMap(DataDO::getPattern, (dataDO) -> {
statisticsHit.addAndGet(dataDO.hitCount);
statisticsRequired.addAndGet(dataDO.requireCount);
return Hitting.HittingDO.newInstance(dataDO.hitCount, dataDO.requireCount);
}, Hitting.HittingDO::mergeShootingDO, LinkedHashMap::new));
// gather the aggregate hit rate across all patterns of the application
result.put(summaryName(), Hitting.HittingDO.newInstance(statisticsHit.get(), statisticsRequired.get()));
return result;
}
@Override
public void reset(String pattern) {
jdbcOperations.update(sqls.getProperty("delete"), pattern);
}
@Override
public void resetAll() {
jdbcOperations.update(sqls.getProperty("truncate"));
}
private void countAddCas(String column, String pattern, long count) {
Optional<DataDO> dataOptional = queryObject(pattern);
// if a record for the pattern exists, update it
if (dataOptional.isPresent()) {
DataDO dataDO = dataOptional.get();
while (update(column, pattern, getObjectCount(dataDO, column, count), dataDO.version) <= 0) {
dataDO = queryObject(pattern).get();
}
} else {
lock.lock();
try {
// double check
dataOptional = queryObject(pattern);
if (dataOptional.isPresent()) {
update(column, pattern, count, dataOptional.get().version);
} else {
insert(column, pattern, count);
}
} finally {
lock.unlock();
}
}
}
private Optional<DataDO> queryObject(String pattern) {
String selectSql = sqls.getProperty("select");
List<Map<String, Object>> mapResults = jdbcOperations.queryForList(selectSql, pattern);
return transferResults(mapResults).findFirst();
}
private List<DataDO> queryAll() {
String selectAllQuery = sqls.getProperty("select_all");
List<Map<String, Object>> mapResults = jdbcOperations.queryForList(selectAllQuery);
return transferResults(mapResults).collect(Collectors.toList());
}
private int insert(String column, String pattern, long count) {
String insertSql = String.format(sqls.getProperty("insert"), column);
return jdbcOperations.update(insertSql, pattern, count);
}
private int update(String column, String pattern, long count, long version) {
String updateSql = String.format(sqls.getProperty("update"), column);
return jdbcOperations.update(updateSql, count, pattern, version);
}
private long getObjectCount(DataDO data, String column, long countOffset) {
long lastCount = column.equals("hit_count") ? data.hitCount : data.requireCount;
return lastCount + countOffset;
}
@PreDestroy
public void tearDown() {
while (hitQueue.size() > 0 || requireQueue.size() > 0) {
try {
TimeUnit.SECONDS.sleep(1);
} catch (InterruptedException ignored) {
}
}
isShutdown = true;
}
protected static final class DataDO {
private String pattern;
private long hitCount;
private long requireCount;
private long version;
public String getPattern() {
return pattern;
}
public void setPattern(String pattern) {
this.pattern = pattern;
}
public void setHitCount(long hitCount) {
this.hitCount = hitCount;
}
public void setRequireCount(long requireCount) {
this.requireCount = requireCount;
}
public void setVersion(long version) {
this.version = version;
}
}
}
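The two abstract hooks above determine where the hit counters are stored. As a rough illustration of jdbcOperationsSupplier(context), the helper below builds a JdbcTemplate from the url/username/password entries that the convenience constructor puts into the context map; the transferResults side is left out because DataDO is a protected nested type visible only to real subclasses.
import java.util.Map;
import java.util.function.Supplier;
import org.springframework.jdbc.core.JdbcOperations;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
public final class HittingJdbcSupplierSketch {
    private HittingJdbcSupplierSketch() {
    }
    // The context keys match the ones populated by AbstractHitting(String url, String username, String password).
    public static Supplier<JdbcOperations> fromContext(Map<String, Object> context) {
        return () -> new JdbcTemplate(new DriverManagerDataSource(
                (String) context.get("url"),
                (String) context.get("username"),
                (String) context.get("password")));
    }
}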