RED-5669: Adapted service to new elasticsearch api
This commit is contained in:
parent
b2c013ece4
commit
41fbdaca71
@ -32,7 +32,7 @@ public class MatchedDocument {
|
||||
private String fileName;
|
||||
|
||||
@Builder.Default
|
||||
private Map<String, Set<String>> highlights = new HashMap<>();
|
||||
private Map<String, List<String>> highlights = new HashMap<>();
|
||||
|
||||
@Builder.Default
|
||||
private Set<String> matchedTerms = new HashSet<>();
|
||||
|
||||
@ -51,12 +51,29 @@
|
||||
<artifactId>jackson-commons</artifactId>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.opensearch.client</groupId>
|
||||
<artifactId>opensearch-rest-high-level-client</artifactId>
|
||||
<version>1.2.4</version>
|
||||
<groupId>co.elastic.clients</groupId>
|
||||
<artifactId>elasticsearch-java</artifactId>
|
||||
<version>8.6.2</version>
|
||||
</dependency>
|
||||
|
||||
<!-- <dependency>-->
|
||||
<!-- <groupId>com.fasterxml.jackson.core</groupId>-->
|
||||
<!-- <artifactId>jackson-databind</artifactId>-->
|
||||
<!-- </dependency>-->
|
||||
|
||||
<dependency>
|
||||
<groupId>jakarta.json</groupId>
|
||||
<artifactId>jakarta.json-api</artifactId>
|
||||
<version>2.0.1</version>
|
||||
</dependency>
|
||||
|
||||
<!-- <dependency>-->
|
||||
<!-- <groupId>com.fasterxml.jackson.dataformat</groupId>-->
|
||||
<!-- <artifactId>jackson-dataformat-xml</artifactId>-->
|
||||
<!-- </dependency>-->
|
||||
|
||||
<!-- spring -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
|
||||
@ -1,5 +1,9 @@
|
||||
package com.iqser.red.service.search.v1.server;
|
||||
|
||||
import static com.fasterxml.jackson.databind.DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES;
|
||||
|
||||
import java.util.TimeZone;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
@ -9,10 +13,15 @@ import org.springframework.boot.context.properties.EnableConfigurationProperties
|
||||
import org.springframework.cloud.openfeign.EnableFeignClients;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
|
||||
import com.fasterxml.jackson.databind.DeserializationFeature;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
|
||||
import com.iqser.red.commons.spring.DefaultWebMvcConfiguration;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.EsClient;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
|
||||
|
||||
@ -32,10 +41,24 @@ public class Application {
|
||||
|
||||
|
||||
@Bean
|
||||
@ConditionalOnMissingBean
|
||||
public ElasticsearchClient elasticsearchClient(ElasticsearchSettings elasticsearchSettings) {
|
||||
@Primary
|
||||
public ObjectMapper objectMapper() {
|
||||
|
||||
return new ElasticsearchClient(elasticsearchSettings);
|
||||
return new ObjectMapper().disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
|
||||
.enable(SerializationFeature.WRITE_DATES_WITH_ZONE_ID)
|
||||
.disable(DeserializationFeature.ADJUST_DATES_TO_CONTEXT_TIME_ZONE)
|
||||
.configure(FAIL_ON_UNKNOWN_PROPERTIES, false)
|
||||
.registerModule(new JavaTimeModule())
|
||||
.setTimeZone(TimeZone.getDefault())
|
||||
.findAndRegisterModules();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
@ConditionalOnMissingBean
|
||||
public EsClient elasticsearchClient(ElasticsearchSettings elasticsearchSettings) {
|
||||
|
||||
return new EsClient(elasticsearchSettings);
|
||||
}
|
||||
|
||||
|
||||
|
||||
@ -6,7 +6,7 @@ import org.springframework.web.bind.annotation.RestController;
|
||||
import com.iqser.red.service.search.v1.model.SearchRequest;
|
||||
import com.iqser.red.service.search.v1.model.SearchResult;
|
||||
import com.iqser.red.service.search.v1.resources.SearchResource;
|
||||
import com.iqser.red.service.search.v1.server.service.SearchService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.SearchService;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
|
||||
@ -5,6 +5,7 @@ public class IndexException extends RuntimeException {
|
||||
public static final String INDEX_EXISTS_ERROR = "Unable to check, if index exists";
|
||||
public static final String CONTENT_TO_JSON_ERROR = "Could not convert document with id '%s' to JSON!";
|
||||
public static final String DOCUMENT_INDEX_ERROR = "Error during indexing document with id '%s'";
|
||||
public static final String DOCUMENT_UPDATE_ERROR = "Error during updating document with id '%s'";
|
||||
public static final String DOCUMENT_DELETE_ERROR = "Error during deleting document with id '%s'";
|
||||
public static final String FAILED_TO_SEARCH = "Error during search";
|
||||
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
package com.iqser.red.service.search.v1.server.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
@ -23,7 +23,7 @@ public class IndexDocument implements Serializable {
|
||||
private String fileId;
|
||||
private String filename;
|
||||
|
||||
private OffsetDateTime date;
|
||||
private Date date;
|
||||
private String assignee;
|
||||
private boolean dossierDeleted;
|
||||
private boolean dossierArchived;
|
||||
|
||||
@ -24,15 +24,15 @@ import com.iqser.red.service.search.v1.server.client.DossierClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexCreatorService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
|
||||
import com.iqser.red.service.search.v1.server.service.TextStorageService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.DocumentDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.DocumentIndexService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.DocumentUpdateService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.IndexCreatorService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.IndexDeleteService;
|
||||
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@ -54,6 +54,7 @@ public class IndexingMessageReceiver {
|
||||
private final RabbitTemplate rabbitTemplate;
|
||||
private final IndexDeleteService indexDeleteService;
|
||||
private final IndexInformationService indexInformationService;
|
||||
private final IndexDocumentConverterService indexDocumentConverterService;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@ -88,12 +89,14 @@ public class IndexingMessageReceiver {
|
||||
case UPDATE:
|
||||
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
|
||||
documentUpdateService.updateDocument(indexRequest.getFileId(),
|
||||
fileStatus.getAssignee(),
|
||||
|
||||
var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
|
||||
dossier.getSoftDeletedTime() != null,
|
||||
dossier.getArchivedTime() != null,
|
||||
fileStatus.getWorkflowStatus().name(),
|
||||
fileStatus.getFileAttributes());
|
||||
|
||||
documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
|
||||
log.info("Successfully updated {}", indexRequest);
|
||||
break;
|
||||
|
||||
@ -152,7 +155,8 @@ public class IndexingMessageReceiver {
|
||||
|
||||
fileStatusProcessingUpdateClient.indexing(dossier.getId(), file.getId());
|
||||
Text text = textStorageService.getText(dossier.getId(), file.getId());
|
||||
documentIndexService.indexDocument(dossier.getDossierTemplateId(),
|
||||
|
||||
var indexDocument = indexDocumentConverterService.convert(dossier.getDossierTemplateId(),
|
||||
dossier.getId(),
|
||||
file.getId(),
|
||||
file.getFilename(),
|
||||
@ -162,6 +166,8 @@ public class IndexingMessageReceiver {
|
||||
dossier.getArchivedTime() != null,
|
||||
file.getWorkflowStatus(),
|
||||
file.getFileAttributes());
|
||||
|
||||
documentIndexService.indexDocument(indexDocument);
|
||||
fileStatusProcessingUpdateClient.indexingSuccessful(dossier.getId(), file.getId());
|
||||
log.info("Successfully indexed dossier {} file {}", dossier.getId(), file.getId());
|
||||
}
|
||||
|
||||
@ -1,123 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.opensearch.action.index.IndexRequest;
|
||||
import org.opensearch.client.RequestOptions;
|
||||
import org.opensearch.common.xcontent.XContentType;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.WorkflowStatus;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexSection;
|
||||
import com.iqser.red.service.search.v1.server.model.SectionArea;
|
||||
import com.iqser.red.service.search.v1.server.model.SectionText;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class DocumentIndexService {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
private final ElasticsearchSettings settings;
|
||||
private final ObjectMapper objectMapper;
|
||||
|
||||
|
||||
@Timed("redactmanager_indexDocument")
|
||||
public void indexDocument(String dossierTemplateId,
|
||||
String dossierId,
|
||||
String fileId,
|
||||
String filename,
|
||||
Text text,
|
||||
String assignee,
|
||||
boolean deleted,
|
||||
boolean archived,
|
||||
WorkflowStatus workflowStatus,
|
||||
Map<String, String> fileAttributes) {
|
||||
|
||||
IndexRequest indexRequest = new IndexRequest(INDEX_NAME).id(fileId);
|
||||
indexRequest.setRefreshPolicy(settings.getRefreshPolicy());
|
||||
indexRequest.source(toJson(convert(dossierTemplateId, dossierId, fileId, filename, text, assignee, deleted, archived, workflowStatus, fileAttributes)), XContentType.JSON);
|
||||
|
||||
try {
|
||||
client.index(indexRequest, RequestOptions.DEFAULT);
|
||||
} catch (IOException e) {
|
||||
throw new IndexException(String.format(IndexException.DOCUMENT_INDEX_ERROR, fileId), e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public String toJson(IndexDocument indexDocument) {
|
||||
|
||||
try {
|
||||
return objectMapper.writeValueAsString(indexDocument);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IndexException(String.format(IndexException.CONTENT_TO_JSON_ERROR, indexDocument.getFileId()), e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private IndexDocument convert(String dossierTemplateId,
|
||||
String dossierId,
|
||||
String fileId,
|
||||
String filename,
|
||||
Text text,
|
||||
String assignee,
|
||||
boolean deleted,
|
||||
boolean archived,
|
||||
WorkflowStatus workflowStatus,
|
||||
Map<String, String> fileAttributes) {
|
||||
|
||||
return IndexDocument.builder()
|
||||
.dossierTemplateId(dossierTemplateId)
|
||||
.dossierId(dossierId)
|
||||
.fileId(fileId)
|
||||
.filename(filename)
|
||||
.sections(text.getSectionTexts().stream().map(this::convert).collect(Collectors.toList()))
|
||||
.date(OffsetDateTime.now())
|
||||
.fileAttributes(convertFileAttributes(fileAttributes))
|
||||
.assignee(assignee)
|
||||
.dossierDeleted(deleted)
|
||||
.dossierArchived(archived)
|
||||
.workflowStatus(workflowStatus.name())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private List<IndexFileAttribute> convertFileAttributes(Map<String, String> fileAttributes) {
|
||||
|
||||
List<IndexFileAttribute> converted = new ArrayList<>();
|
||||
fileAttributes.entrySet().forEach(entry -> converted.add(new IndexFileAttribute(entry.getKey(), entry.getValue())));
|
||||
return converted;
|
||||
}
|
||||
|
||||
|
||||
private IndexSection convert(SectionText sectionText) {
|
||||
|
||||
return IndexSection.builder()
|
||||
.sectionNumber(sectionText.getSectionNumber())
|
||||
.text(sectionText.getText())
|
||||
.pages(sectionText.getSectionAreas().stream().map(SectionArea::getPage).collect(Collectors.toSet()))
|
||||
.headline(sectionText.getHeadline())
|
||||
.build();
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,64 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.opensearch.action.update.UpdateRequest;
|
||||
import org.opensearch.action.update.UpdateResponse;
|
||||
import org.opensearch.client.RequestOptions;
|
||||
import org.opensearch.common.xcontent.XContentType;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
|
||||
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class DocumentUpdateService {
|
||||
|
||||
private final ObjectMapper objectMapper;
|
||||
private final ElasticsearchClient client;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@Timed("redactmanager_updateDocument")
|
||||
public void updateDocument(String fileId, String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {
|
||||
|
||||
var indexUpdateRequest = IndexDocumentUpdate.builder()
|
||||
.assignee(assignee)
|
||||
.dossierDeleted(deleted)
|
||||
.dossierArchived(archived)
|
||||
.workflowStatus(workflowStatus)
|
||||
.fileAttributes(convertFileAttributes(fileAttributes))
|
||||
.build();
|
||||
|
||||
UpdateRequest request = new UpdateRequest(INDEX_NAME, fileId);
|
||||
request.doc(objectMapper.writeValueAsString(indexUpdateRequest), XContentType.JSON);
|
||||
|
||||
UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
|
||||
if (updateResponse.status().getStatus() < 200 || updateResponse.status().getStatus() > 204) {
|
||||
throw new IllegalStateException("Document could not be updated. HTTP error " + updateResponse.status().getStatus());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
private List<IndexFileAttribute> convertFileAttributes(Map<String, String> fileAttributes) {
|
||||
|
||||
List<IndexFileAttribute> converted = new ArrayList<>();
|
||||
if (fileAttributes != null && !fileAttributes.isEmpty()) {
|
||||
fileAttributes.forEach((key, value) -> converted.add(new IndexFileAttribute(key, value)));
|
||||
}
|
||||
return converted;
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,75 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.exception.IndexException.INDEX_EXISTS_ERROR;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.opensearch.client.RequestOptions;
|
||||
import org.opensearch.client.indices.CreateIndexRequest;
|
||||
import org.opensearch.client.indices.CreateIndexResponse;
|
||||
import org.opensearch.client.indices.GetIndexRequest;
|
||||
import org.opensearch.common.settings.Settings;
|
||||
import org.opensearch.common.xcontent.XContentType;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.ResourceLoader;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
public class IndexCreatorService {
|
||||
|
||||
public static final String INDEX_NAME = "redaction";
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
public IndexCreatorService(ElasticsearchClient client, ElasticsearchSettings settings) {
|
||||
|
||||
this.client = client;
|
||||
this.settings = settings;
|
||||
|
||||
if (!indexExists()) {
|
||||
createIndex();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void createIndex() {
|
||||
|
||||
String indexMapping = ResourceLoader.load("index/mapping.json");
|
||||
String indexSettings = ResourceLoader.load("index/settings.json");
|
||||
|
||||
Settings.Builder settingsBuilder = Settings.builder()
|
||||
.loadFromSource(indexSettings, XContentType.JSON)
|
||||
.put("number_of_shards", settings.getNumberOfShards())
|
||||
.put("number_of_replicas", settings.getNumberOfReplicas())
|
||||
.put("index.mapping.nested_objects.limit", settings.getNumberOfNestedObjectLimit());
|
||||
|
||||
CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX_NAME).settings(settingsBuilder.build()).mapping(indexMapping, XContentType.JSON);
|
||||
|
||||
try {
|
||||
CreateIndexResponse response = client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
|
||||
log.info("Successfully created index: {}", response.index());
|
||||
} catch (IOException e) {
|
||||
log.error("Failed to create index.", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private boolean indexExists() {
|
||||
|
||||
GetIndexRequest getIndexRequest = new GetIndexRequest(INDEX_NAME);
|
||||
try {
|
||||
return client.indices().exists(getIndexRequest, RequestOptions.DEFAULT);
|
||||
} catch (IOException e) {
|
||||
throw new IndexException(INDEX_EXISTS_ERROR, e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,61 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
|
||||
import org.opensearch.action.support.master.AcknowledgedResponse;
|
||||
import org.opensearch.client.RequestOptions;
|
||||
import org.opensearch.client.indices.CloseIndexRequest;
|
||||
import org.opensearch.common.unit.TimeValue;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class IndexDeleteService {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void closeIndex() {
|
||||
|
||||
log.info("Will close index");
|
||||
CloseIndexRequest request = new CloseIndexRequest(INDEX_NAME);
|
||||
request.setTimeout(TimeValue.timeValueMinutes(2));
|
||||
AcknowledgedResponse closeIndexResponse = client.indices().close(request, RequestOptions.DEFAULT);
|
||||
|
||||
if (closeIndexResponse.isAcknowledged()) {
|
||||
log.info("Index is closed");
|
||||
} else {
|
||||
throw new IndexException("Error while closing index");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void dropIndex() {
|
||||
|
||||
log.info("Will drop index");
|
||||
DeleteIndexRequest request = new DeleteIndexRequest(INDEX_NAME);
|
||||
request.timeout(TimeValue.timeValueMinutes(2));
|
||||
request.timeout("2m");
|
||||
AcknowledgedResponse deleteIndexResponse = client.indices().delete(request, RequestOptions.DEFAULT);
|
||||
|
||||
if (deleteIndexResponse.isAcknowledged()) {
|
||||
log.info("Index is dropped");
|
||||
} else {
|
||||
throw new IndexException("Error while dropping index");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,84 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.WorkflowStatus;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexSection;
|
||||
import com.iqser.red.service.search.v1.server.model.SectionArea;
|
||||
import com.iqser.red.service.search.v1.server.model.SectionText;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
|
||||
@Service
|
||||
public class IndexDocumentConverterService {
|
||||
|
||||
public IndexDocument convert(String dossierTemplateId,
|
||||
String dossierId,
|
||||
String fileId,
|
||||
String filename,
|
||||
Text text,
|
||||
String assignee,
|
||||
boolean deleted,
|
||||
boolean archived,
|
||||
WorkflowStatus workflowStatus,
|
||||
Map<String, String> fileAttributes) {
|
||||
|
||||
return IndexDocument.builder()
|
||||
.dossierTemplateId(dossierTemplateId)
|
||||
.dossierId(dossierId)
|
||||
.fileId(fileId)
|
||||
.filename(filename)
|
||||
.sections(text.getSectionTexts().stream().map(this::convert).collect(Collectors.toList()))
|
||||
.date(Date.from(OffsetDateTime.now().toInstant()))
|
||||
.fileAttributes(convertFileAttributes(fileAttributes))
|
||||
.assignee(assignee)
|
||||
.dossierDeleted(deleted)
|
||||
.dossierArchived(archived)
|
||||
.workflowStatus(workflowStatus.name())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
public IndexDocumentUpdate convertUpdateDocument(String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {
|
||||
return IndexDocumentUpdate.builder()
|
||||
.assignee(assignee)
|
||||
.dossierDeleted(deleted)
|
||||
.dossierArchived(archived)
|
||||
.workflowStatus(workflowStatus)
|
||||
.fileAttributes(convertFileAttributes(fileAttributes))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private List<IndexFileAttribute> convertFileAttributes(Map<String, String> fileAttributes) {
|
||||
|
||||
List<IndexFileAttribute> converted = new ArrayList<>();
|
||||
if (fileAttributes != null && !fileAttributes.isEmpty()) {
|
||||
fileAttributes.forEach((key, value) -> converted.add(new IndexFileAttribute(key, value)));
|
||||
}
|
||||
return converted;
|
||||
}
|
||||
|
||||
private IndexSection convert(SectionText sectionText) {
|
||||
|
||||
return IndexSection.builder()
|
||||
.sectionNumber(sectionText.getSectionNumber())
|
||||
.text(sectionText.getText())
|
||||
.pages(sectionText.getSectionAreas().stream().map(SectionArea::getPage).collect(Collectors.toSet()))
|
||||
.headline(sectionText.getHeadline())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
@ -1,313 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.exception.IndexException.FAILED_TO_SEARCH;
|
||||
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.apache.lucene.search.join.ScoreMode;
|
||||
import org.opensearch.action.search.SearchRequest;
|
||||
import org.opensearch.action.search.SearchResponse;
|
||||
import org.opensearch.client.RequestOptions;
|
||||
import org.opensearch.common.text.Text;
|
||||
import org.opensearch.index.query.BoolQueryBuilder;
|
||||
import org.opensearch.index.query.InnerHitBuilder;
|
||||
import org.opensearch.index.query.NestedQueryBuilder;
|
||||
import org.opensearch.index.query.QueryBuilder;
|
||||
import org.opensearch.index.query.QueryBuilders;
|
||||
import org.opensearch.search.SearchHit;
|
||||
import org.opensearch.search.SearchHits;
|
||||
import org.opensearch.search.builder.SearchSourceBuilder;
|
||||
import org.opensearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.model.MatchedDocument;
|
||||
import com.iqser.red.service.search.v1.model.MatchedSection;
|
||||
import com.iqser.red.service.search.v1.model.SearchResult;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.Query;
|
||||
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
|
||||
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import io.micrometer.core.instrument.util.StringUtils;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class SearchService {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
|
||||
|
||||
@Timed("redactmanager_search")
|
||||
public SearchResult search(String queryString,
|
||||
List<String> dossierTemplateIds,
|
||||
List<String> dossierIds,
|
||||
String fileId,
|
||||
String assignee,
|
||||
boolean includeDeletedDossiers,
|
||||
boolean includeArchivedDossiers,
|
||||
String workflowStatus,
|
||||
Map<String, String> fileAttributes,
|
||||
int page,
|
||||
int pageSize,
|
||||
boolean returnSections) {
|
||||
|
||||
Query query = QueryStringConverter.convert(queryString);
|
||||
|
||||
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(convertQuery(query,
|
||||
dossierTemplateIds,
|
||||
dossierIds,
|
||||
fileId,
|
||||
assignee,
|
||||
includeDeletedDossiers,
|
||||
includeArchivedDossiers,
|
||||
workflowStatus,
|
||||
fileAttributes,
|
||||
returnSections))
|
||||
.from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
|
||||
.size(getPageSizeOrDefault(pageSize))
|
||||
.fetchSource(new String[]{"dossierId", "dossierTemplateId", "dossierDeleted", "dossierArchived", "filename", "fileId", "assignee", "dossierStatus", "workflowStatus", "fileAttributes"},
|
||||
new String[]{"sections"})
|
||||
.highlighter(new HighlightBuilder().field("sections.text").field("filename").field("fileAttributes.value").highlighterType("fvh"))
|
||||
.trackScores(true);
|
||||
|
||||
SearchRequest request = new SearchRequest(INDEX_NAME).source(searchSourceBuilder);
|
||||
|
||||
SearchResponse response = execute(request);
|
||||
|
||||
return convert(response, query);
|
||||
}
|
||||
|
||||
|
||||
protected SearchResponse execute(SearchRequest searchRequest) {
|
||||
|
||||
try {
|
||||
return client.search(searchRequest, RequestOptions.DEFAULT);
|
||||
} catch (IOException e) {
|
||||
throw new IndexException(FAILED_TO_SEARCH, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private QueryBuilder convertQuery(Query query,
|
||||
List<String> dossierTemplateIds,
|
||||
List<String> dossierIds,
|
||||
String fileId,
|
||||
String assignee,
|
||||
boolean includeDeletedDossiers,
|
||||
boolean includeArchivedDossiers,
|
||||
String workflowStatus,
|
||||
Map<String, String> fileAttributes,
|
||||
boolean returnSections) {
|
||||
|
||||
BoolQueryBuilder entireQuery = QueryBuilders.boolQuery();
|
||||
BoolQueryBuilder sectionsQueries = QueryBuilders.boolQuery();
|
||||
|
||||
for (String must : query.getMusts()) {
|
||||
|
||||
QueryBuilder textPhraseQuery = QueryBuilders.matchPhraseQuery("sections.text", must.toLowerCase(Locale.ROOT)).queryName(must);
|
||||
QueryBuilder filenamePhraseQuery = QueryBuilders.matchPhrasePrefixQuery("filename", must.toLowerCase(Locale.ROOT)).queryName("filename." + must);
|
||||
QueryBuilder fileAttributesPhraseQuery = QueryBuilders.matchPhraseQuery("fileAttributes.value", must.toLowerCase(Locale.ROOT)).queryName("fileAttributes." + must);
|
||||
|
||||
QueryBuilder filenameOrTextMustQuery = QueryBuilders.boolQuery().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery);
|
||||
entireQuery.must(filenameOrTextMustQuery);
|
||||
sectionsQueries.should(textPhraseQuery);
|
||||
}
|
||||
for (String should : query.getShoulds()) {
|
||||
|
||||
QueryBuilder textTermQuery = QueryBuilders.matchPhraseQuery("sections.text", should.toLowerCase(Locale.ROOT)).queryName(should);
|
||||
QueryBuilder filenameTermQuery = QueryBuilders.matchPhrasePrefixQuery("filename", should.toLowerCase(Locale.ROOT)).queryName("filename." + should);
|
||||
QueryBuilder fileAttributesPhraseQuery = QueryBuilders.matchPhraseQuery("fileAttributes.value", should.toLowerCase(Locale.ROOT)).queryName("fileAttributes." + should);
|
||||
entireQuery.should(textTermQuery);
|
||||
entireQuery.should(filenameTermQuery);
|
||||
entireQuery.should(fileAttributesPhraseQuery);
|
||||
sectionsQueries.should(textTermQuery);
|
||||
}
|
||||
|
||||
if (returnSections) {
|
||||
NestedQueryBuilder nestedQuery = QueryBuilders.nestedQuery("sections", sectionsQueries, ScoreMode.Avg)
|
||||
.innerHit(new InnerHitBuilder().setSize(100)
|
||||
.setFetchSourceContext(new FetchSourceContext(true,
|
||||
new String[]{"sections.headline", "sections.sectionNumber", "sections.pages"},
|
||||
new String[]{"sections.text"})));
|
||||
entireQuery.should(nestedQuery);
|
||||
}
|
||||
|
||||
BoolQueryBuilder filterQuery = QueryBuilders.boolQuery();
|
||||
|
||||
if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {
|
||||
|
||||
var dossierTemplateIdQueryBuilder = QueryBuilders.boolQuery();
|
||||
|
||||
for (var dossierTemplateId : dossierTemplateIds) {
|
||||
if (StringUtils.isNotEmpty(dossierTemplateId)) {
|
||||
dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.matchQuery("dossierTemplateId", dossierTemplateId));
|
||||
}
|
||||
}
|
||||
|
||||
filterQuery.must(dossierTemplateIdQueryBuilder);
|
||||
}
|
||||
|
||||
if (dossierIds != null && !dossierIds.isEmpty()) {
|
||||
|
||||
var dossierIdQueryBuilder = QueryBuilders.boolQuery();
|
||||
|
||||
for (var dossierId : dossierIds) {
|
||||
if (StringUtils.isNotEmpty(dossierId)) {
|
||||
dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.matchQuery("dossierId", dossierId));
|
||||
}
|
||||
}
|
||||
|
||||
filterQuery.must(dossierIdQueryBuilder);
|
||||
}
|
||||
|
||||
if (StringUtils.isNotEmpty(fileId)) {
|
||||
filterQuery.must(QueryBuilders.matchQuery("fileId", fileId));
|
||||
}
|
||||
|
||||
if (StringUtils.isNotEmpty(assignee)) {
|
||||
filterQuery.must(QueryBuilders.matchQuery("assignee", assignee));
|
||||
}
|
||||
|
||||
if (includeArchivedDossiers) {
|
||||
filterQuery.must(QueryBuilders.termsQuery("dossierArchived", true, false));
|
||||
} else {
|
||||
filterQuery.must(QueryBuilders.termsQuery("dossierArchived", false));
|
||||
}
|
||||
|
||||
if (includeDeletedDossiers) {
|
||||
filterQuery.must(QueryBuilders.termsQuery("dossierDeleted", true, false));
|
||||
} else {
|
||||
filterQuery.must(QueryBuilders.termsQuery("dossierDeleted", false));
|
||||
}
|
||||
|
||||
if (StringUtils.isNotEmpty(workflowStatus)) {
|
||||
filterQuery.must(QueryBuilders.matchQuery("workflowStatus", workflowStatus));
|
||||
}
|
||||
|
||||
if (fileAttributes != null && !fileAttributes.isEmpty()) {
|
||||
var fileAttributesQueryBuilder = QueryBuilders.boolQuery();
|
||||
|
||||
for (var fileAttributeKey : fileAttributes.keySet()) {
|
||||
if (StringUtils.isNotEmpty(fileAttributeKey)) {
|
||||
fileAttributesQueryBuilder.filter(QueryBuilders.boolQuery()
|
||||
.must(QueryBuilders.matchQuery("fileAttributes.name", fileAttributeKey))
|
||||
.must(QueryBuilders.matchQuery("fileAttributes.value", fileAttributes.get(fileAttributeKey))));
|
||||
}
|
||||
}
|
||||
|
||||
filterQuery.must(fileAttributesQueryBuilder);
|
||||
}
|
||||
|
||||
return QueryBuilders.boolQuery().filter(filterQuery).must(entireQuery);
|
||||
}
|
||||
|
||||
|
||||
private SearchResult convert(SearchResponse response, Query query) {
|
||||
|
||||
return SearchResult.builder()
|
||||
.matchedDocuments(Arrays.stream(response.getHits().getHits()).map(hit -> convertSearchHit(hit, query)).collect(Collectors.toList()))
|
||||
.maxScore(response.getHits().getMaxScore())
|
||||
.total(response.getHits().getTotalHits().value)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private MatchedDocument convertSearchHit(SearchHit hit, Query query) {
|
||||
|
||||
Set<String> matchesTerms = Arrays.stream(hit.getMatchedQueries())
|
||||
.map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
|
||||
.map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());
|
||||
|
||||
MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
|
||||
.score(hit.getScore())
|
||||
.dossierId((String) hit.getSourceAsMap().get("dossierId"))
|
||||
.dossierTemplateId((String) hit.getSourceAsMap().get("dossierTemplateId"))
|
||||
.fileId((String) hit.getSourceAsMap().get("fileId"))
|
||||
.dossierStatus((String) hit.getSourceAsMap().get("dossierStatus"))
|
||||
.assignee((String) hit.getSourceAsMap().get("assignee"))
|
||||
.fileAttributes(convertFileAttributes(hit.getSourceAsMap().get("fileAttributes")))
|
||||
.workflowStatus((String) hit.getSourceAsMap().get("workflowStatus"))
|
||||
.fileName((String) hit.getSourceAsMap().get("fileName"))
|
||||
.dossierDeleted((Boolean) hit.getSourceAsMap().get("dossierDeleted"))
|
||||
.dossierArchived((Boolean) hit.getSourceAsMap().get("dossierArchived"))
|
||||
.highlights(hit.getHighlightFields()
|
||||
.entrySet()
|
||||
.stream()
|
||||
.collect(Collectors.toMap(Map.Entry::getKey, e -> Arrays.stream(e.getValue().getFragments()).map(Text::string).collect(Collectors.toSet()))))
|
||||
.matchedTerms(matchesTerms)
|
||||
.unmatchedTerms(unmatchedTerms);
|
||||
|
||||
if (hit.getInnerHits() != null) {
|
||||
SearchHits sectionHits = hit.getInnerHits().get("sections");
|
||||
|
||||
matchedDocumentBuilder.matchedSections(Arrays.stream(sectionHits.getHits()).map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
|
||||
.containsAllMatchedSections(sectionHits.getTotalHits().value == sectionHits.getHits().length);
|
||||
}
|
||||
|
||||
return matchedDocumentBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
|
||||
private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {
|
||||
|
||||
Map<String, String> fileAttributes = new HashMap<>();
|
||||
|
||||
if (fileAttributesSourceMap != null) {
|
||||
List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
|
||||
list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
|
||||
}
|
||||
|
||||
return fileAttributes;
|
||||
}
|
||||
|
||||
|
||||
private MatchedSection convertInnerHit(SearchHit hit) {
|
||||
|
||||
return MatchedSection.builder()
|
||||
.headline((String) hit.getSourceAsMap().get("headline"))
|
||||
.sectionNumber((Integer) hit.getSourceAsMap().get("sectionNumber"))
|
||||
.pages(new HashSet<>((ArrayList<Integer>) hit.getSourceAsMap().get("pages")))
|
||||
.matchedTerms(Arrays.stream(hit.getMatchedQueries()).collect(Collectors.toSet()))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private int getPageSizeOrDefault(int pageSize) {
|
||||
|
||||
if (pageSize <= 0) {
|
||||
return 10;
|
||||
}
|
||||
return pageSize;
|
||||
}
|
||||
|
||||
|
||||
private int getPageOrDefault(int page) {
|
||||
|
||||
if (page < 0) {
|
||||
return 0;
|
||||
}
|
||||
return page;
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,34 +1,33 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
|
||||
import static com.iqser.red.service.search.v1.server.service.elasticsearch.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.opensearch.action.delete.DeleteRequest;
|
||||
import org.opensearch.client.RequestOptions;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch.core.DeleteRequest;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class DocumentDeleteService {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
private final EsClient client;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
public void deleteDocument(String fileId) {
|
||||
|
||||
DeleteRequest request = new DeleteRequest(INDEX_NAME).id(fileId).setRefreshPolicy(settings.getRefreshPolicy());
|
||||
DeleteRequest request = new DeleteRequest.Builder().index(INDEX_NAME).id(fileId).refresh(settings.getRefreshPolicy()).build();
|
||||
|
||||
try {
|
||||
client.delete(request, RequestOptions.DEFAULT);
|
||||
} catch (IOException e) {
|
||||
client.delete(request);
|
||||
} catch (IOException | ElasticsearchException e) {
|
||||
throw new IndexException(String.format(IndexException.DOCUMENT_DELETE_ERROR, fileId), e);
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,44 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.elasticsearch.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.WorkflowStatus;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
@Service
@RequiredArgsConstructor
public class DocumentIndexService {

	// New-API Elasticsearch client wrapper (co.elastic.clients), Spring-injected.
	private final EsClient client;
	private final ElasticsearchSettings settings;


	/**
	 * Indexes (or fully overwrites) the given document in the "redaction" index,
	 * keyed by its fileId. The configured refresh policy controls when the
	 * document becomes visible to searches.
	 *
	 * @param indexDocument document to index; its fileId is used as the ES _id
	 * @throws IndexException wrapping any transport (IOException) or
	 *         server-side (ElasticsearchException) failure
	 */
	@Timed("redactmanager_indexDocument")
	public void indexDocument(IndexDocument indexDocument) {

		try {
			client.index(i -> i.index(INDEX_NAME)
					.id(indexDocument.getFileId())
					.refresh(settings.getRefreshPolicy())
					.document(indexDocument));
		} catch (IOException | ElasticsearchException e) {
			throw new IndexException(String.format(IndexException.DOCUMENT_INDEX_ERROR, indexDocument.getFileId()), e);
		}
	}

}
|
||||
@ -0,0 +1,37 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.elasticsearch.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
@Service
@RequiredArgsConstructor
public class DocumentUpdateService {

	// New-API Elasticsearch client wrapper (co.elastic.clients), Spring-injected.
	private final EsClient client;
	private final ElasticsearchSettings settings;


	/**
	 * Applies a partial (doc-merge) update to the document identified by fileId
	 * in the "redaction" index, using the configured refresh policy.
	 *
	 * NOTE(review): the catch block below already handles IOException, so
	 * {@code @SneakyThrows} looks redundant here — confirm it can be dropped.
	 *
	 * @param fileId              ES document id of the document to update
	 * @param indexDocumentUpdate partial document whose fields are merged in
	 * @throws IndexException wrapping any transport or server-side failure
	 */
	@SneakyThrows
	@Timed("redactmanager_updateDocument")
	public void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate) {

		try {
			client.update(u -> u.index(INDEX_NAME).id(fileId).doc(indexDocumentUpdate).refresh(settings.getRefreshPolicy()), IndexDocumentUpdate.class);
		} catch (IOException | ElasticsearchException e) {
			throw new IndexException(String.format(IndexException.DOCUMENT_UPDATE_ERROR, fileId), e);
		}
	}

}
|
||||
@ -1,6 +1,5 @@
|
||||
package com.iqser.red.service.search.v1.server.client;
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
@ -11,13 +10,16 @@ import org.apache.http.auth.AuthScope;
|
||||
import org.apache.http.auth.UsernamePasswordCredentials;
|
||||
import org.apache.http.client.CredentialsProvider;
|
||||
import org.apache.http.impl.client.BasicCredentialsProvider;
|
||||
import org.opensearch.client.RestClient;
|
||||
import org.opensearch.client.RestClientBuilder;
|
||||
import org.opensearch.client.RestHighLevelClient;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
import org.elasticsearch.client.RestClientBuilder;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
|
||||
import co.elastic.clients.elasticsearch.ElasticsearchClient;
|
||||
import co.elastic.clients.json.jackson.JacksonJsonpMapper;
|
||||
import co.elastic.clients.transport.ElasticsearchTransport;
|
||||
import co.elastic.clients.transport.rest_client.RestClientTransport;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.Delegate;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@ -25,7 +27,7 @@ import lombok.extern.slf4j.Slf4j;
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class ElasticsearchClient {
|
||||
public class EsClient {
|
||||
|
||||
// Lower timeouts should be set per request.
|
||||
private static final int ABSURD_HIGH_TIMEOUT = 90_000_000;
|
||||
@ -33,7 +35,7 @@ public class ElasticsearchClient {
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
@Delegate
|
||||
private RestHighLevelClient client;
|
||||
private ElasticsearchClient client;
|
||||
|
||||
|
||||
@PostConstruct
|
||||
@ -54,18 +56,17 @@ public class ElasticsearchClient {
|
||||
builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
|
||||
}
|
||||
|
||||
client = new RestHighLevelClient(builder);
|
||||
ElasticsearchTransport transport = new RestClientTransport(builder.build(), new JacksonJsonpMapper());
|
||||
|
||||
this.client = new ElasticsearchClient(transport);
|
||||
|
||||
}
|
||||
|
||||
|
||||
@PreDestroy
|
||||
public void shutdown() {
|
||||
public void onShutdown() {
|
||||
|
||||
try {
|
||||
client.close();
|
||||
} catch (IOException e) {
|
||||
log.error(e.getMessage(), e);
|
||||
}
|
||||
client.shutdown();
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,89 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.exception.IndexException.INDEX_EXISTS_ERROR;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.ResourceLoader;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.mapping.TypeMapping;
|
||||
import co.elastic.clients.elasticsearch.indices.IndexSettings;
|
||||
import co.elastic.clients.elasticsearch.indices.MappingLimitSettingsNestedObjects;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
@Service
public class IndexCreatorService {

	/** Name of the single Elasticsearch index this service manages. */
	public static final String INDEX_NAME = "redaction";

	private final EsClient client;
	private final ElasticsearchSettings settings;


	/**
	 * Ensures the index exists at startup: checks for it and creates it when
	 * missing. NOTE(review): running this in the constructor means a transport
	 * failure during the existence check (wrapped as IndexException) aborts
	 * Spring context startup — confirm that is intended.
	 */
	public IndexCreatorService(EsClient client, ElasticsearchSettings settings) {

		this.client = client;
		this.settings = settings;

		if (!indexExists()) {
			createIndex();
		}
	}


	/**
	 * Creates the "redaction" index with settings and mappings loaded from
	 * classpath resources (index/settings.json, index/mapping.json).
	 *
	 * NOTE(review): IOException is only logged here, not rethrown — unlike
	 * indexExists() — so the service can start without an index. Also, the
	 * unchecked ElasticsearchException is not caught (the sibling document
	 * services catch it); confirm this best-effort behavior is intended.
	 */
	public void createIndex() {

		try {
			var response = client.indices().create(i -> i.index(INDEX_NAME).settings(createIndexSettings()).mappings(createIndexMapping()));
			log.info("Successfully created index: {}", response.index());
		} catch (IOException e) {
			log.error("Failed to create index.", e);
		}
	}


	/**
	 * Returns whether the "redaction" index already exists.
	 *
	 * @throws IndexException wrapping transport failures
	 */
	private boolean indexExists() {

		try {
			var response = client.indices().exists(i -> i.index(INDEX_NAME));
			return response.value();
		} catch (IOException e) {
			throw new IndexException(INDEX_EXISTS_ERROR, e);
		}
	}


	/** Loads the index type mapping from the classpath resource index/mapping.json. */
	@SneakyThrows
	private TypeMapping createIndexMapping() {

		URL resource = ResourceLoader.class.getClassLoader().getResource("index/mapping.json");

		try (InputStream is = resource.openStream()) {
			return new TypeMapping.Builder().withJson(is).build();
		}
	}


	/**
	 * Loads base index settings from index/settings.json, then applies the
	 * configured shard/replica counts and the nested-object limit on top of the
	 * JSON-provided values.
	 */
	@SneakyThrows
	private IndexSettings createIndexSettings() {

		URL resource = ResourceLoader.class.getClassLoader().getResource("index/settings.json");

		try (InputStream is = resource.openStream()) {
			return new IndexSettings.Builder().withJson(is)
					.numberOfShards(settings.getNumberOfShards())
					.numberOfReplicas(settings.getNumberOfReplicas())
					.mapping(m -> m.nestedObjects(MappingLimitSettingsNestedObjects.of(a -> a.limit(settings.getNumberOfNestedObjectLimit()))))
					.build();
		}
	}

}
|
||||
@ -0,0 +1,46 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.elasticsearch.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
@Service
@RequiredArgsConstructor
public class IndexDeleteService {

	// New-API Elasticsearch client wrapper (co.elastic.clients), Spring-injected.
	private final EsClient client;


	/**
	 * Closes the "redaction" index with a 2-minute request timeout.
	 *
	 * @throws IndexException when the cluster does not acknowledge the close
	 *         (transport exceptions escape via @SneakyThrows)
	 */
	@SneakyThrows
	public void closeIndex() {

		var closeIndexResponse = client.indices().close(i -> i.index(INDEX_NAME).timeout(t -> t.time("2m")));
		if (closeIndexResponse.acknowledged()) {
			log.info("Index is closed");
		} else {
			throw new IndexException("Error while closing index");
		}
	}


	/**
	 * Deletes the "redaction" index entirely, with a 2-minute request timeout.
	 *
	 * @throws IndexException when the cluster does not acknowledge the deletion
	 *         (transport exceptions escape via @SneakyThrows)
	 */
	@SneakyThrows
	public void dropIndex() {

		log.info("Will drop index");
		var deleteIndexResponse = client.indices().delete(i -> i.index(INDEX_NAME).timeout(t -> t.time("2m")));

		if (deleteIndexResponse.acknowledged()) {
			log.info("Index is dropped");
		} else {
			throw new IndexException("Error while dropping index");
		}
	}

}
|
||||
@ -0,0 +1,333 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.exception.IndexException.FAILED_TO_SEARCH;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.model.MatchedDocument;
|
||||
import com.iqser.red.service.search.v1.model.MatchedSection;
|
||||
import com.iqser.red.service.search.v1.model.SearchResult;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.Query;
|
||||
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.FieldValue;
|
||||
import co.elastic.clients.elasticsearch._types.query_dsl.ChildScoreMode;
|
||||
import co.elastic.clients.elasticsearch._types.query_dsl.QueryBuilders;
|
||||
import co.elastic.clients.elasticsearch.core.SearchRequest;
|
||||
import co.elastic.clients.elasticsearch.core.SearchResponse;
|
||||
import co.elastic.clients.elasticsearch.core.search.HighlightField;
|
||||
import co.elastic.clients.elasticsearch.core.search.HighlighterType;
|
||||
import co.elastic.clients.elasticsearch.core.search.Hit;
|
||||
import co.elastic.clients.elasticsearch.core.search.InnerHitsResult;
|
||||
import co.elastic.clients.json.JsonData;
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import io.micrometer.core.instrument.util.StringUtils;
|
||||
import jakarta.json.JsonObject;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class SearchService {
|
||||
|
||||
private final EsClient client;
|
||||
|
||||
|
||||
/**
 * Runs a paged full-text search over the "redaction" index.
 *
 * The query string is parsed into must/should terms, matched against section
 * text, filename and file-attribute values, and combined with non-scoring
 * filters (dossier/file/assignee/workflow restrictions, archived/deleted
 * flags). Highlights are requested for all three searchable fields.
 *
 * @param returnSections when true, nested section inner hits are requested so
 *        matching sections can be returned per document
 * @return the converted {@code SearchResult} for the requested page
 */
@Timed("redactmanager_search")
public SearchResult search(String queryString,
		List<String> dossierTemplateIds,
		List<String> dossierIds,
		String fileId,
		String assignee,
		boolean includeDeletedDossiers,
		boolean includeArchivedDossiers,
		String workflowStatus,
		Map<String, String> fileAttributes,
		int page,
		int pageSize,
		boolean returnSections) {

	// Parse the raw query string into must/should term lists.
	Query query = QueryStringConverter.convert(queryString);

	// Request highlighting on the three searchable fields, default settings.
	Map<String, HighlightField> highlightFieldMap = new HashMap<>();
	highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
	highlightFieldMap.put("filename", new HighlightField.Builder().build());
	highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());

	SearchRequest request = new SearchRequest.Builder().query(convertQuery(query,
			dossierTemplateIds,
			dossierIds,
			fileId,
			assignee,
			includeDeletedDossiers,
			includeArchivedDossiers,
			workflowStatus,
			fileAttributes,
			returnSections))
			// Offset-based paging; page index is zero-based.
			.from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
			.size(getPageSizeOrDefault(pageSize))
			// Fetch only the metadata fields; section text stays excluded.
			.source(s -> s.filter(f -> f.includes("dossierId",
					"dossierTemplateId",
					"dossierDeleted",
					"dossierArchived",
					"filename",
					"fileId",
					"assignee",
					"dossierStatus",
					"workflowStatus",
					"fileAttributes")))
			.highlight(h -> h.type(HighlighterType.FastVector).fields(highlightFieldMap))
			// Track scores so maxScore is populated even with sorting in play.
			.trackScores(true)
			.build();

	SearchResponse response = execute(request);

	return convert(response, query);
}
|
||||
|
||||
|
||||
protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {
|
||||
|
||||
try {
|
||||
return client.search(searchRequest, IndexDocument.class);
|
||||
} catch (IOException e) {
|
||||
throw new IndexException(FAILED_TO_SEARCH, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
 * Builds the complete Elasticsearch bool query from the parsed query and the
 * filter parameters:
 *  - a scoring part ("entireQuery") over sections.text, filename and
 *    fileAttributes.value; every clause is tagged via queryName(...) so
 *    convertSearchHit can recover which user terms matched,
 *  - a non-scoring filter part (dossier/file/assignee/workflow restrictions
 *    plus archived/deleted flags).
 */
private co.elastic.clients.elasticsearch._types.query_dsl.Query convertQuery(Query query,
		List<String> dossierTemplateIds,
		List<String> dossierIds,
		String fileId,
		String assignee,
		boolean includeDeletedDossiers,
		boolean includeArchivedDossiers,
		String workflowStatus,
		Map<String, String> fileAttributes,
		boolean returnSections) {

	var entireQuery = QueryBuilders.bool();
	// Collects the section-text clauses separately for the nested inner-hit query.
	var sectionsQueries = QueryBuilders.bool();

	// Every must term has to match at least one of: section text, filename, or
	// a file-attribute value. queryName prefixes ("filename.", "fileAttributes.")
	// let the hit converter tell the match source apart from the plain term.
	for (String must : query.getMusts()) {

		var textPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must));
		var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must));
		var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
				.query(must.toLowerCase(Locale.ROOT))
				.queryName("fileAttributes." + must));

		var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
		entireQuery.must(filenameOrTextMustQuery);
		sectionsQueries.should(textPhraseQuery);
	}
	// Should terms only boost scoring; none of them is required to match.
	for (String should : query.getShoulds()) {

		var textTermQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should));
		var filenameTermQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should));
		var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
				.query(should.toLowerCase(Locale.ROOT))
				.queryName("fileAttributes." + should));
		entireQuery.should(textTermQuery);
		entireQuery.should(filenameTermQuery);
		entireQuery.should(fileAttributesPhraseQuery);
		sectionsQueries.should(textTermQuery);
	}

	// Nested query over "sections" with inner hits (max 100) so matching
	// sections can be reported per document.
	if (returnSections) {
		var nestedQuery = QueryBuilders.nested(n -> n.scoreMode(ChildScoreMode.Avg)
				.queryName("sections")
				.query(sectionsQueries.build()._toQuery())
				.path("sections")
				.innerHits(i -> i.size(100)));
		entireQuery.should(nestedQuery);
	}

	// Non-scoring restrictions, combined below via bool().filter(...).
	var filterQuery = QueryBuilders.bool();

	if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {

		// Any of the given template ids may match (bool-should).
		var dossierTemplateIdQueryBuilder = QueryBuilders.bool();

		for (var dossierTemplateId : dossierTemplateIds) {
			if (StringUtils.isNotEmpty(dossierTemplateId)) {
				dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierTemplateId").query(dossierTemplateId)));
			}
		}

		filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
	}

	if (dossierIds != null && !dossierIds.isEmpty()) {

		// Any of the given dossier ids may match (bool-should).
		var dossierIdQueryBuilder = QueryBuilders.bool();

		for (var dossierId : dossierIds) {
			if (StringUtils.isNotEmpty(dossierId)) {
				dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierId").query(dossierId)));
			}
		}

		filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
	}

	if (StringUtils.isNotEmpty(fileId)) {
		filterQuery.must(QueryBuilders.match(q -> q.field("fileId").query(fileId)));
	}

	if (StringUtils.isNotEmpty(assignee)) {
		filterQuery.must(QueryBuilders.match(q -> q.field("assignee").query(assignee)));
	}

	// Archived flag: either restrict to non-archived, or accept both values.
	if (includeArchivedDossiers) {
		filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived")
				.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
	} else {
		filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
	}

	// Deleted flag: same pattern as the archived flag above.
	if (includeDeletedDossiers) {
		filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted")
				.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
	} else {
		filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
	}

	if (StringUtils.isNotEmpty(workflowStatus)) {
		filterQuery.must(QueryBuilders.match(q -> q.field("workflowStatus").query(workflowStatus)));
	}

	// Each given attribute key must match by name AND value.
	if (fileAttributes != null && !fileAttributes.isEmpty()) {
		var fileAttributesQueryBuilder = QueryBuilders.bool();

		for (var fileAttributeKey : fileAttributes.keySet()) {
			if (StringUtils.isNotEmpty(fileAttributeKey)) {
				fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
						.must(QueryBuilders.match(q -> q.field("fileAttributes.name").query(fileAttributeKey)))
						.must(QueryBuilders.match(q -> q.field("fileAttributes.value").query(fileAttributes.get(fileAttributeKey))))
						.build()
						._toQuery()));
			}
		}

		filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
	}

	// Filters do not score; only entireQuery contributes to relevance.
	return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
}
|
||||
|
||||
|
||||
private SearchResult convert(SearchResponse response, Query query) {
|
||||
|
||||
List<Hit> hits = response.hits().hits();
|
||||
|
||||
return SearchResult.builder()
|
||||
.matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
|
||||
.maxScore(response.hits().maxScore().floatValue())
|
||||
.total(response.hits().total().value())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
/**
 * Converts one top-level search hit into a {@code MatchedDocument}.
 *
 * convertQuery tags text clauses with the raw term and filename/file-attribute
 * clauses with a "filename." / "fileAttributes." prefix via queryName(...);
 * the prefixes are stripped here so matched queries map back to the user's
 * original search terms.
 */
private MatchedDocument convertSearchHit(Hit hit, Query query) {

	List<String> m = hit.matchedQueries();

	// Strip the queryName prefixes added in convertQuery.
	Set<String> matchesTerms = m.stream()
			.map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
			.map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
			.collect(Collectors.toSet());

	// Every must/should term of the query that produced no match on this document.
	Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());

	// NOTE(review): hit.source() and hit.score() are nullable in the new client
	// model; an unscored or source-less hit would NPE below — confirm the
	// request settings (trackScores, source filter) always prevent that.
	IndexDocument indexDocument = (IndexDocument) hit.source();

	MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
			.score(hit.score().floatValue())
			.dossierId(indexDocument.getDossierId())
			.dossierTemplateId(indexDocument.getDossierTemplateId())
			.fileId(indexDocument.getFileId())
			.assignee(indexDocument.getAssignee())
			.fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
			.workflowStatus(indexDocument.getWorkflowStatus())
			.fileName(indexDocument.getFilename())
			.dossierDeleted(indexDocument.isDossierDeleted())
			.dossierArchived(indexDocument.isDossierArchived())
			// The new client already exposes highlights as Map<String, List<String>>.
			.highlights(hit.highlight())
			.matchedTerms(matchesTerms)
			.unmatchedTerms(unmatchedTerms);

	// Inner hits exist only when the nested "sections" query was included
	// (returnSections == true in search()).
	if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
		InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
		matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
				// Inner hits are capped (size 100 in convertQuery); flag whether
				// the returned sections cover all matching sections.
				.containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
	}

	return matchedDocumentBuilder.build();

}
|
||||
|
||||
|
||||
/**
 * Converts the raw "fileAttributes" source value — a list of {name, value}
 * objects — into a flat name-to-value map. Returns an empty map when the
 * source field is absent.
 *
 * NOTE(review): a new ObjectMapper is constructed on every call and the
 * conversion targets the raw ArrayList.class — consider a shared static mapper
 * and a TypeReference for type safety.
 */
private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {

	Map<String, String> fileAttributes = new HashMap<>();

	if (fileAttributesSourceMap != null) {
		List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
		// Later duplicate attribute names overwrite earlier ones.
		list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
	}

	return fileAttributes;
}
|
||||
|
||||
|
||||
/**
 * Maps one nested "sections" inner hit onto a {@code MatchedSection}.
 * Inner-hit sources arrive as generic JsonData, so fields are read from the
 * raw JSON object rather than a typed document.
 */
private MatchedSection convertInnerHit(Hit<JsonData> hit) {

	JsonObject indexSection = hit.source().toJson().asJsonObject();

	// "pages" is a JSON array of ints; de-duplicate into a set.
	var jsonArray = indexSection.getJsonArray("pages");
	var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());

	return MatchedSection.builder()
			.headline(indexSection.getString("headline"))
			.sectionNumber(indexSection.getInt("sectionNumber"))
			.pages(pages)
			.matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
			.build();
}
|
||||
|
||||
|
||||
private int getPageSizeOrDefault(int pageSize) {
|
||||
|
||||
if (pageSize <= 0) {
|
||||
return 10;
|
||||
}
|
||||
return pageSize;
|
||||
}
|
||||
|
||||
|
||||
private int getPageOrDefault(int page) {
|
||||
|
||||
if (page < 0) {
|
||||
return 0;
|
||||
}
|
||||
return page;
|
||||
}
|
||||
|
||||
}
|
||||
@ -3,11 +3,11 @@ package com.iqser.red.service.search.v1.server.settings;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.opensearch.action.support.WriteRequest;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.Refresh;
|
||||
import lombok.Data;
|
||||
|
||||
@Data
|
||||
@ -27,13 +27,13 @@ public class ElasticsearchSettings {
|
||||
|
||||
private String password;
|
||||
|
||||
private int numberOfShards = 5;
|
||||
private int numberOfReplicas = 1;
|
||||
private String numberOfShards = "5";
|
||||
private String numberOfReplicas = "1";
|
||||
private int numberOfNestedObjectLimit = 100000;
|
||||
|
||||
/**
|
||||
* ES refresh policy for write requests to use. Used in tests to wait for completion of write requests.
|
||||
*/
|
||||
private WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.NONE;
|
||||
private Refresh refreshPolicy = Refresh.True;
|
||||
|
||||
}
|
||||
|
||||
@ -13,14 +13,14 @@ import lombok.experimental.UtilityClass;
|
||||
@UtilityClass
|
||||
public class ResourceLoader {
|
||||
|
||||
public String load(String classpathPath) {
|
||||
public InputStream load(String classpathPath) {
|
||||
|
||||
URL resource = ResourceLoader.class.getClassLoader().getResource(classpathPath);
|
||||
if (resource == null) {
|
||||
throw new IllegalArgumentException("could not load classpath resource: " + classpathPath);
|
||||
}
|
||||
try (InputStream is = resource.openStream(); InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8); BufferedReader br = new BufferedReader(isr)) {
|
||||
return br.lines().collect(Collectors.joining("\n"));
|
||||
try (InputStream is = resource.openStream()) {
|
||||
return is;
|
||||
} catch (IOException e) {
|
||||
throw new IllegalArgumentException("could not load classpath resource: " + classpathPath, e);
|
||||
}
|
||||
|
||||
@ -20,7 +20,8 @@ import org.testcontainers.elasticsearch.ElasticsearchContainer;
|
||||
import org.testcontainers.utility.DockerImageName;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.Application;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.EsClient;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.IndexCreatorService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.storage.commons.StorageAutoConfiguration;
|
||||
import com.iqser.red.storage.commons.service.StorageService;
|
||||
@ -33,7 +34,7 @@ import com.iqser.red.storage.commons.service.StorageService;
|
||||
@DirtiesContext
|
||||
public abstract class AbstractElasticsearchIntegrationTest {
|
||||
|
||||
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=IMMEDIATE";
|
||||
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=WaitFor";
|
||||
|
||||
static class Initializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {
|
||||
|
||||
@ -64,7 +65,7 @@ public abstract class AbstractElasticsearchIntegrationTest {
|
||||
public static class TestConfiguration {
|
||||
|
||||
@Bean
|
||||
public IndexCreatorService indexCreationService(ElasticsearchClient elasticsearchClient, ElasticsearchSettings elasticsearchSettings) {
|
||||
public IndexCreatorService indexCreationService(EsClient elasticsearchClient, ElasticsearchSettings elasticsearchSettings) {
|
||||
|
||||
return new IndexCreatorService(elasticsearchClient, elasticsearchSettings);
|
||||
|
||||
|
||||
@ -22,6 +22,11 @@ import com.iqser.red.service.search.v1.server.client.FileStatusClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
|
||||
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.DocumentDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.DocumentIndexService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.DocumentUpdateService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.elasticsearch.SearchService;
|
||||
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
@ -63,6 +68,9 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
@MockBean
|
||||
private IndexInformationService indexInformationService;
|
||||
|
||||
@Autowired
|
||||
private IndexDocumentConverterService indexDocumentConverterService;
|
||||
|
||||
private final long UPDATE_TIMER = 1500;
|
||||
|
||||
|
||||
@ -73,7 +81,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1",
|
||||
indexDocument("template1",
|
||||
"dossierId1",
|
||||
"fileId1",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
|
||||
@ -83,7 +91,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
false,
|
||||
WorkflowStatus.NEW,
|
||||
Map.of("F1Key", "F1Value"));
|
||||
documentIndexService.indexDocument("template2",
|
||||
indexDocument("template2",
|
||||
"dossierId2",
|
||||
"fileId2",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
|
||||
@ -108,7 +116,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1",
|
||||
indexDocument("template1",
|
||||
"dossierId1",
|
||||
"fileId1",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
|
||||
@ -148,7 +156,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1",
|
||||
indexDocument("template1",
|
||||
"dossierId1",
|
||||
"fileId1",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
|
||||
@ -158,7 +166,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
false,
|
||||
WorkflowStatus.NEW,
|
||||
Map.of("F1Key", "F1Value"));
|
||||
documentIndexService.indexDocument("template2",
|
||||
indexDocument("template2",
|
||||
"dossierId2",
|
||||
"fileId2",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
|
||||
@ -180,14 +188,14 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
|
||||
|
||||
// Act & Assert 3
|
||||
documentUpdateService.updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"));
|
||||
updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, "UserId2", false, false, WorkflowStatus.NEW.name(), Map.of("F2Key", "F2Value"), 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
|
||||
|
||||
// Act & Assert 4
|
||||
documentUpdateService.updateDocument("fileId2", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"));
|
||||
updateDocument("fileId2", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"), 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(2);
|
||||
@ -206,7 +214,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1",
|
||||
indexDocument("template1",
|
||||
"dossierId1",
|
||||
"fileId1",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
|
||||
@ -216,7 +224,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
false,
|
||||
WorkflowStatus.NEW,
|
||||
Map.of("F1Key", "F1Value"));
|
||||
documentIndexService.indexDocument("template2",
|
||||
indexDocument("template2",
|
||||
"dossierId2",
|
||||
"fileId2",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
|
||||
@ -233,7 +241,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
|
||||
|
||||
// Act & Assert 2
|
||||
documentUpdateService.updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, "UserId2", false, false, null, null, 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(2);
|
||||
@ -252,7 +260,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1",
|
||||
indexDocument("template1",
|
||||
"dossierId1",
|
||||
"fileId1",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
|
||||
@ -262,7 +270,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
false,
|
||||
WorkflowStatus.NEW,
|
||||
Map.of("F1Key", "F1Value"));
|
||||
documentIndexService.indexDocument("template2",
|
||||
indexDocument("template2",
|
||||
"dossierId2",
|
||||
"fileId2",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
|
||||
@ -284,7 +292,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
|
||||
|
||||
// Act & Assert 3
|
||||
documentUpdateService.updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, WorkflowStatus.APPROVED.name(), null, 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
|
||||
@ -296,7 +304,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
|
||||
|
||||
// Act & Assert 5
|
||||
documentUpdateService.updateDocument("fileId2", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
updateDocument("fileId2", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, WorkflowStatus.APPROVED.name(), null, 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(2);
|
||||
@ -315,7 +323,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1",
|
||||
indexDocument("template1",
|
||||
"dossierId1",
|
||||
"fileId1",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
|
||||
@ -325,7 +333,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
false,
|
||||
WorkflowStatus.NEW,
|
||||
Map.of("F1Key", "F1Value"));
|
||||
documentIndexService.indexDocument("template2",
|
||||
indexDocument("template2",
|
||||
"dossierId2",
|
||||
"fileId2",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
|
||||
@ -355,7 +363,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
|
||||
|
||||
// Act & Assert 3
|
||||
documentUpdateService.updateDocument("fileId1", "UserId2", false, true, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
updateDocument("fileId1", "UserId2", false, true, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, null, 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
|
||||
@ -368,7 +376,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
|
||||
|
||||
// Act & Assert 5
|
||||
documentUpdateService.updateDocument("fileId2", "UserId2", true, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
updateDocument("fileId2", "UserId2", true, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, null, 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
|
||||
@ -389,7 +397,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1",
|
||||
indexDocument("template1",
|
||||
"dossierId1",
|
||||
"fileId1",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
|
||||
@ -399,7 +407,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
false,
|
||||
WorkflowStatus.NEW,
|
||||
Map.of("F1Key", "F1Value"));
|
||||
documentIndexService.indexDocument("template2",
|
||||
indexDocument("template2",
|
||||
"dossierId2",
|
||||
"fileId2",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
|
||||
@ -433,7 +441,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
|
||||
|
||||
// Act & Assert 6
|
||||
documentUpdateService.updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), Map.of("F1Key", "F1ValueNEW"));
|
||||
updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), Map.of("F1Key", "F1ValueNEW"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, Map.of("F1Key", "F1Value"), 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
|
||||
@ -444,14 +452,14 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId1");
|
||||
|
||||
// Act & Assert 8
|
||||
documentUpdateService.updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), Map.of("F1Key", "F1Value", "F2Key", "F2Value"));
|
||||
updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), Map.of("F1Key", "F1Value", "F2Key", "F2Value"));
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, Map.of("F1Key", "F1Value", "F2Key", "F2Value"), 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
|
||||
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId1");
|
||||
|
||||
// Act & Assert 9
|
||||
documentUpdateService.updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), null);
|
||||
updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), null);
|
||||
Thread.sleep(UPDATE_TIMER);
|
||||
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, Map.of("F1Key", "F1Value"), 0, 10, false);
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
|
||||
@ -467,17 +475,8 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
ClassPathResource textResource2 = new ClassPathResource("files/Text2.json");
|
||||
Text text2 = objectMapper.readValue(textResource2.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1",
|
||||
"dossierId1",
|
||||
"fileId1",
|
||||
"Single Study - Oral (Gavage) Mouse.pdf",
|
||||
text,
|
||||
"UserId",
|
||||
false,
|
||||
false,
|
||||
WorkflowStatus.NEW,
|
||||
Map.of("F1Key", "F1Value"));
|
||||
documentIndexService.indexDocument("template2",
|
||||
indexDocument("template1", "dossierId1", "fileId1", "Single Study - Oral (Gavage) Mouse.pdf", text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template2",
|
||||
"dossierId2",
|
||||
"fileId2",
|
||||
"S-Metolachlor_RAR_01_Volume_1_2018-09-06.pdf",
|
||||
@ -524,7 +523,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "luke-skywalker-42.pdf";
|
||||
String searchString = "szedhsegkekhglghserlkghrsdvkerxyfdbvkrdjgh";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 1, 10, true);
|
||||
@ -544,7 +543,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "luke-skywalker-42.pdf";
|
||||
String searchString = fileName;
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -566,7 +565,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "luke-blankwalker 42.pdf";
|
||||
String searchString = fileName;
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -589,7 +588,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "luke.pdf";
|
||||
String searchString = "luke";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -611,7 +610,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "1 Abamectin_prr.pdf";
|
||||
String searchString = "Abamectin";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -633,7 +632,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "1 Abamectin_prr.pdf";
|
||||
String searchString = "prr";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -655,7 +654,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "1 Abamectin_prr.pdf";
|
||||
String searchString = "1";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -677,7 +676,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "1 Abamectin_prr.pdf";
|
||||
String searchString = "1 Abamectin_prr.pdf";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -698,7 +697,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "1 Abamectin_prr.pdf";
|
||||
String searchString = "1 Abamectin";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -719,7 +718,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "1 Abamectin_prr.pdf";
|
||||
String searchString = "_prr.pdf";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -738,8 +737,8 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
String fileName = "116 IDD0000261308.pdf";
|
||||
String filename2 = "115 IDD0000261308.pdf";
|
||||
String searchString = "\"116 IDD\"";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId2", filename2, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId2", filename2, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
|
||||
@ -747,6 +746,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(result.getMatchedDocuments().get(0).getMatchedTerms().size()).isGreaterThan(0);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testFilenameWithNumbersMatch() throws IOException {
|
||||
|
||||
@ -754,7 +754,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "VV-733382.pdf";
|
||||
String searchString = "733382";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
|
||||
@ -773,7 +773,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "LUKE.pdf";
|
||||
String searchString = "luke";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -795,7 +795,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "luke.pdf";
|
||||
String searchString = "luke.pdf";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -817,7 +817,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "Äpfel.pdf";
|
||||
String searchString = "äpfel";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -839,7 +839,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "Äpfel.pdf";
|
||||
String searchString = "äpfel.pdf";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -861,7 +861,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "luke-skywalker-42.pdf";
|
||||
String searchString = "luke";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -883,7 +883,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "luke-skywalker-42.pdf";
|
||||
String searchString = "luke-skywalker-42";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -906,7 +906,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
String fileName = "luke-skywalker-42.pdf";
|
||||
String matchedString = fileName;
|
||||
String searchString = "\"" + matchedString + "\"";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -929,7 +929,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
String fileName = "luke-skywalker-42.pdf";
|
||||
String matchedString = "luke";
|
||||
String searchString = "\"" + matchedString + "\"";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -952,7 +952,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
String fileName = "luke-skywalker-42.pdf";
|
||||
String matchedString = "luke-skywalker-42";
|
||||
String searchString = "\"luke-skywalker-42\"";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -975,7 +975,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
String fileName = "luke-blankwalker 42.pdf";
|
||||
String matchedString = fileName;
|
||||
String searchString = "\"" + matchedString + "\"";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -997,7 +997,7 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
String fileName = "luke-skywalker-42.pdf";
|
||||
String searchString = "14C]-SDS-46851";
|
||||
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
|
||||
|
||||
// Act
|
||||
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
|
||||
@ -1010,4 +1010,37 @@ public class SearchTest extends AbstractElasticsearchIntegrationTest {
|
||||
assertThat(StringUtils.contains(result.getMatchedDocuments().get(0).getHighlights().get("sections.text").toArray()[0].toString(), searchString)).isTrue();
|
||||
}
|
||||
|
||||
|
||||
private void indexDocument(String dossierTemplateId,
|
||||
String dossierId,
|
||||
String fileId,
|
||||
String filename,
|
||||
Text text,
|
||||
String assignee,
|
||||
boolean deleted,
|
||||
boolean archived,
|
||||
WorkflowStatus workflowStatus,
|
||||
Map<String, String> fileAttributes) {
|
||||
|
||||
var indexDocument = indexDocumentConverterService.convert(dossierTemplateId,
|
||||
dossierId,
|
||||
fileId,
|
||||
filename,
|
||||
text,
|
||||
assignee,
|
||||
deleted,
|
||||
archived,
|
||||
workflowStatus,
|
||||
fileAttributes);
|
||||
documentIndexService.indexDocument(indexDocument);
|
||||
}
|
||||
|
||||
|
||||
public void updateDocument(String fileId, String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {
|
||||
|
||||
var updateDocument = indexDocumentConverterService.convertUpdateDocument(assignee, deleted, archived, workflowStatus, fileAttributes);
|
||||
documentUpdateService.updateDocument(fileId, updateDocument);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user