RED-6270: Implement adding of dictionary entries via batch updates to improve write speed

This commit is contained in:
Viktor Seifert 2023-04-28 15:33:26 +02:00
parent dd78b08a77
commit 080014021b
3 changed files with 35 additions and 37 deletions

View File

@ -1,11 +1,16 @@
package com.iqser.red.service.persistence.management.v1.processor;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.cloud.openfeign.support.PageJacksonModule;
import org.springframework.cloud.openfeign.support.SortJacksonModule;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Lazy;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.retry.backoff.ExponentialBackOffPolicy;
import org.springframework.retry.policy.SimpleRetryPolicy;
import org.springframework.retry.support.RetryTemplate;
@ -51,4 +56,11 @@ public class PersistenceServiceProcessorConfiguration {
return retryTemplate;
}
/**
 * Exposes a {@link JdbcTemplate} built on the multi-tenant data source;
 * used by JDBCWriteUtils for plain-SQL batch writes.
 *
 * @param dataSource the "multiTenantDataSource"-qualified data source, injected lazily
 * @return a new JdbcTemplate wrapping that data source
 */
@Bean
public JdbcTemplate jdbcTemplate(@Qualifier("multiTenantDataSource") @Lazy DataSource dataSource) {
    final var template = new JdbcTemplate(dataSource);
    return template;
}
}

View File

@ -12,20 +12,13 @@ import java.util.Map;
import java.util.stream.Collectors;
import javax.persistence.Column;
import javax.persistence.EntityManager;
import javax.persistence.Table;
import javax.transaction.Transactional;
import org.apache.commons.lang3.StringUtils;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.jdbc.datasource.SingleConnectionDataSource;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.management.v1.processor.service.persistence.mulitenancy.DynamicDataSourceBasedMultiTenantConnectionProvider;
import com.iqser.red.service.persistence.management.v1.processor.utils.multitenancy.TenantContext;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.RequiredArgsConstructor;
@ -35,43 +28,26 @@ import lombok.SneakyThrows;
// NOTE(review): this span is a rendered diff hunk — removed and added lines
// appear back-to-back without +/- markers; the labels below mark each pair.
@RequiredArgsConstructor
public class JDBCWriteUtils {
// Reusable INSERT template: table name, column list, placeholder list.
private final String SQL_TEMPLATE = "INSERT INTO %s (%s) values (%s)";
// ^ removed; v added — promoted to a compile-time constant (static final).
private static final String SQL_TEMPLATE = "INSERT INTO %s (%s) values (%s)";
// Per-entity-class cache of generated SQL text and getter mappings.
private final Map<Class<?>, EntityMetadata> entityMetadataMap = new HashMap<>();
private final EntityManager entityManager;
private final JdbcTemplate jdbcTemplate;
// Persists all given entities in one transaction. All entities in a single
// call are assumed to share the class of the first element, since metadata
// is derived from that element only.
@SneakyThrows
@Transactional
public <T> void saveBatch(final List<T> entities) {
// ^ removed signature; v added signature (drops 'final' on the parameter).
public <T> void saveBatch(List<T> entities) {
// Nothing to do for an empty batch.
if (entities.isEmpty()) {
return;
}
var metadata = getEntityMetadata(entities.iterator().next());
// ^ removed; v added — direct List access instead of an iterator.
var metadata = getEntityMetadata(entities.get(0));
// Rows are flushed to the database in chunks of this size.
final int batchSize = 500;
// --- removed implementation: one native query executed per row through the
// EntityManager, parameters bound by reflective getter invocation. ---
var query = entityManager.createNativeQuery(metadata.getSqlStatement());
for (int j = 0; j < entities.size(); j += batchSize) {
final List<T> batchList = entities.subList(j, Math.min(j + batchSize, entities.size()));
for (var entity : batchList) {
int paramIndex = 1;
for (var mapping : metadata.getFieldMethodMap().entrySet()) {
query.setParameter(paramIndex++, mapping.getValue().invoke(entity));
}
query.executeUpdate();
}
}
entityManager.clear();
// --- added implementation: a single JdbcTemplate batch update; per-row
// parameter binding is delegated to EntityMetadata::setValues. ---
jdbcTemplate.batchUpdate(metadata.getSqlStatement(), entities, batchSize, metadata::setValues);
}
@ -144,6 +120,16 @@ public class JDBCWriteUtils {
private String sqlStatement;
private Map<String, Method> fieldMethodMap;
/**
 * Binds one entity's column values to the given prepared statement, used as
 * the per-row setter for {@code JdbcTemplate.batchUpdate}.
 * <p>
 * NOTE(review): assumes the iteration order of {@code fieldMethodMap} matches
 * the column order in the generated SQL — presumably both are derived from the
 * same map instance; confirm where the statement is built.
 *
 * @param preparedStatement statement whose positional parameters are filled
 * @param entity            the entity whose getters supply the values
 */
@SneakyThrows
public void setValues(PreparedStatement preparedStatement, Object entity) {
    // JDBC parameters are 1-based.
    int paramIndex = 1;
    // Only the getter methods are needed here, so iterate values() rather
    // than entrySet() (original looped entries and discarded the keys).
    for (var getter : getFieldMethodMap().values()) {
        preparedStatement.setObject(paramIndex++, getter.invoke(entity));
    }
}
}
}

View File

@ -101,33 +101,33 @@ public class EntityPerformanceTest extends AbstractPersistenceServerServiceTest
// Compares wall-clock time of JPA saveAll vs. the JDBC batch path for the
// same number of dictionary entries.
// NOTE(review): rendered diff hunk — removed and added lines appear
// back-to-back without +/- markers; the labels below mark each pair.
@Test
public void testWritePerformance() {
var tenKEntries = generateEntries(10_000);
// ^ removed; v added — entry count extracted into a named constant.
final int numberOfEntries = 10_000;
var entries = generateEntries(numberOfEntries);
var template = dossierTemplateTesterAndProvider.provideTestTemplate("test");
var type1 = typeProvider.testAndProvideType(template, null, "t1");
var type2 = typeProvider.testAndProvideType(template, null, "t2");
// Two identical entry sets, one per type: type1 goes through JPA, type2
// through the JDBC batch writer.
List<DictionaryEntryEntity> type1Entries = tenKEntries.stream().map(s -> new DictionaryEntryEntity(0, s, 1, false, type1.getTypeId())).collect(Collectors.toList());
// ^ removed; v added — renamed source list.
List<DictionaryEntryEntity> type1Entries = entries.stream().map(s -> new DictionaryEntryEntity(0, s, 1, false, type1.getTypeId())).collect(Collectors.toList());
List<DictionaryEntryEntity> type2Entries = tenKEntries.stream().map(s -> new DictionaryEntryEntity(0, s, 1, false, type2.getTypeId())).collect(Collectors.toList());
// ^ removed; v added — renamed source list.
List<DictionaryEntryEntity> type2Entries = entries.stream().map(s -> new DictionaryEntryEntity(0, s, 1, false, type2.getTypeId())).collect(Collectors.toList());
// Repository must start empty so the count checks below are meaningful.
assertThat(entryRepository.count()).isEqualTo(0);
long t1 = System.currentTimeMillis();
entryRepository.saveAll(type1Entries);
long jpaTime = System.currentTimeMillis() - t1;
assertThat(entryRepository.count()).isEqualTo(10_000);
// ^ removed; v added — magic number replaced by the constant.
assertThat(entryRepository.count()).isEqualTo(numberOfEntries);
t1 = System.currentTimeMillis();
jdbcWriteUtils.saveBatch(type2Entries);
long jdbcTime = System.currentTimeMillis() - t1;
assertThat(entryRepository.count()).isEqualTo(20_000);
// ^ removed; v added — magic number replaced by the constant.
assertThat(entryRepository.count()).isEqualTo(2 * numberOfEntries);
// Timing comparison left disabled — presumably too flaky to assert on; confirm.
// assertThat(jpaTime).isGreaterThan(jdbcTime);
System.out.println("JPA Time: " + jpaTime + "ms for 10k entries");
System.out.println("JDBC Time: " + jdbcTime + "ms for 10k entries");
// ^ removed println lines; v added — parameterized SLF4J logging instead.
log.info("JPA Time: {} ms for {} entries", jpaTime, numberOfEntries);
log.info("JDBC Time: {} ms for {} entries", jdbcTime, numberOfEntries);
}
}