Compare commits

1 Commit

| Author | SHA1 | Date |
|---|---|---|
|  | c8f1f1255e |  |
@@ -1,36 +0,0 @@
package com.iqser.red.service.search.v1.server.controller;

import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

import com.iqser.red.service.search.v1.model.SearchRequest;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.resources.SearchResource;
import com.iqser.red.service.search.v1.server.service.SearchService;

import lombok.RequiredArgsConstructor;

@RestController
@RequiredArgsConstructor
public class SearchController implements SearchResource {

    private final SearchService searchService;

    public SearchResult getDossierStatus(@RequestBody SearchRequest searchRequest) {

        return searchService.search(searchRequest.getQueryString(),
                searchRequest.getDossierTemplateIds(),
                searchRequest.getDossierIds(),
                searchRequest.getFileId(),
                searchRequest.getAssignee(),
                searchRequest.isIncludeDeletedDossiers(),
                searchRequest.isIncludeArchivedDossiers(),
                searchRequest.getWorkflowStatus(),
                searchRequest.getFileAttributes(),
                searchRequest.getPage(),
                searchRequest.getPageSize(),
                searchRequest.isReturnSections());
    }

}
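For reference, the controller binds the POST body to a SearchRequest. A sketch of building an equivalent request object in code — this assumes SearchRequest follows the same Lombok conventions as the other v1 model classes (setters matching the getters the controller calls), which the diff itself does not show; all values are illustrative:

```java
import java.util.List;
import java.util.Map;

import com.iqser.red.service.search.v1.model.SearchRequest;

class SearchRequestSketch {

    public static void main(String[] args) {
        // Assumption: SearchRequest exposes setters matching the getters used
        // in SearchController above; not confirmed by this diff.
        SearchRequest request = new SearchRequest();
        request.setQueryString("\"data processing\" contract");
        request.setDossierTemplateIds(List.of("template-1"));
        request.setDossierIds(List.of("dossier-1"));
        request.setIncludeDeletedDossiers(false);
        request.setIncludeArchivedDossiers(false);
        request.setFileAttributes(Map.of("customer", "ACME"));
        request.setPage(0);
        request.setPageSize(10);
        request.setReturnSections(true);
    }
}
```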
@@ -1,48 +0,0 @@
package com.iqser.red.service.search.v1.server.migration;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationContext;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.server.client.TenantsClient;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;

import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
public class MigrationStarterService {

    private final ApplicationContext ctx;
    private final IndexInformationService indexInformationService;
    private final IndexingMessageReceiver indexingMessageReceiver;
    private final SearchServiceSettings settings;
    private final TenantsClient tenantsClient;

    @EventListener(ApplicationReadyEvent.class)
    public void migrate() {

        // This can only run in the post-upgrade hook, because otherwise the old service is still running.
        if (settings.isMigrateOnly()) {
            tenantsClient.getTenants().forEach(tenant -> {
                TenantContext.setTenantId(tenant.getTenantId());
                if (indexInformationService.hasIndexChanged()) {
                    log.info("Index has changed and will be closed, dropped, recreated and all files will be indexed");
                    indexingMessageReceiver.receiveIndexingRequest(IndexMessage.builder().messageType(IndexMessageType.DROP).build());
                }
            });
            System.exit(SpringApplication.exit(ctx, () -> 0));
        }
    }

}
@@ -0,0 +1,16 @@
package com.iqser.red.service.search.v1.server.model;

import java.util.Map;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@Data
@NoArgsConstructor
@AllArgsConstructor
public class Embeddings {

    private Map<String, Float[]> embeddings;

}
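The new Embeddings model is a thin wrapper around a map of named vectors. A minimal usage sketch — the key format is an assumption, not prescribed by the class; here it mirrors the fileId_sectionNumber document ids introduced further down in this diff:

```java
import java.util.Map;

class EmbeddingsSketch {

    public static void main(String[] args) {
        // Hypothetical key "file-1_0" (fileId_sectionNumber); the class itself
        // places no constraint on how the map is keyed.
        Embeddings embeddings = new Embeddings(Map.of("file-1_0", new Float[] { 0.12f, -0.34f, 0.56f }));

        Float[] vector = embeddings.getEmbeddings().get("file-1_0");
        System.out.println("vector length: " + vector.length);
    }
}
```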
@@ -4,6 +4,7 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Set;

import lombok.AccessLevel;
import lombok.AllArgsConstructor;
@@ -18,21 +19,18 @@ import lombok.NoArgsConstructor;
@SuppressWarnings("serial")
public class IndexDocument implements Serializable {

    private String dossierTemplateId;
    private String dossierId;

    private String fileId;
    private String filename;
    private int sectionNumber;

    private Date date;
    private String assignee;
    private boolean dossierDeleted;
    private boolean dossierArchived;
    private String workflowStatus;

    @Builder.Default
    private List<IndexSection> sections = new ArrayList<>();
    private String text;
    private Set<Integer> pages;
    private String headline;

    @Builder.Default
    private List<IndexFileAttribute> fileAttributes = new ArrayList<>();

    private Float[] wordEmbeddingsVector = new Float[383];

}
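With these hunks, IndexDocument gains per-section fields (sectionNumber, pages, headline, a flat text) and a wordEmbeddingsVector next to the existing nested sections list. A sketch of building one section-level document via the Lombok builder, assuming the class carries @Builder as the @Builder.Default annotations suggest; all values are illustrative:

```java
import java.util.Set;

class IndexDocumentSketch {

    public static void main(String[] args) {
        IndexDocument document = IndexDocument.builder()
                .dossierTemplateId("template-1")
                .dossierId("dossier-1")
                .fileId("file-1")
                .filename("contract.pdf")
                .sectionNumber(0)
                .pages(Set.of(1, 2))
                .headline("Introduction")
                .text("First section text.")
                .wordEmbeddingsVector(new Float[383]) // 383 as declared in the diff, taken as-is
                .build();

        // Matches the per-section document id used by DocumentIndexServiceImpl further down.
        System.out.println(document.getFileId() + "_" + document.getSectionNumber());
    }
}
```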
@@ -1,201 +1,201 @@
package com.iqser.red.service.search.v1.server.queue;

import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_QUEUE;
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_DQL;
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_QUEUE;

import java.util.List;

import org.springframework.amqp.AmqpRejectAndDontRequeueException;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.stereotype.Service;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.server.client.DossierClient;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
import com.iqser.red.service.search.v1.server.service.TextStorageService;

import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
public class IndexingMessageReceiver {

    private final ObjectMapper objectMapper;
    private final TextStorageService textStorageService;
    private final FileStatusClient fileStatusClient;
    private final DossierClient dossierClient;
    private final FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
    private final RabbitTemplate rabbitTemplate;

    private final DocumentDeleteService documentDeleteService;
    private final DocumentUpdateService documentUpdateService;
    private final DocumentIndexService documentIndexService;
    private final IndexDeleteService indexDeleteService;
    private final IndexInformationService indexInformationService;
    private final IndexDocumentConverterService indexDocumentConverterService;

    @SneakyThrows
    @RabbitHandler
    @RabbitListener(queues = INDEXING_QUEUE)
    public void receiveIndexingRequest(Message message) {

        var indexRequest = objectMapper.readValue(message.getBody(), IndexMessage.class);

        // This prevents endless retries on OOM errors.
        if (message.getMessageProperties().isRedelivered()) {
            throw new AmqpRejectAndDontRequeueException(String.format("Error during last processing of request with dossierId: %s and fileId: %s, do not retry.",
                    indexRequest.getDossierId(),
                    indexRequest.getFileId()));
        }

        receiveIndexingRequest(indexRequest);
    }

    public void receiveIndexingRequest(IndexMessage indexRequest) {

        log.info("Processing indexing request: {}", indexRequest);

        switch (indexRequest.getMessageType()) {
            case INSERT:
                var fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
                var dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
                indexFile(dossier, fileStatus);
                break;

            case UPDATE:
                fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
                dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);

                var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
                        dossier.getSoftDeletedTime() != null,
                        dossier.getArchivedTime() != null,
                        fileStatus.getWorkflowStatus().name(),
                        fileStatus.getFileAttributes());

                documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
                log.info("Successfully updated {}", indexRequest);
                break;

            case DROP:
                indexDeleteService.recreateIndex();
                addAllDocumentsToIndexQueue();
                try {
                    indexInformationService.updateIndexInformation();
                } catch (Exception e) {
                    log.error("Could not update index information", e);
                }
                break;

            default:
                throw new IllegalArgumentException("MessageType '" + indexRequest.getMessageType() + "' does not exist");
        }

    }

    @RabbitHandler
    @RabbitListener(queues = INDEXING_DQL)
    public void receiveIndexingRequestDQL(String in) throws JsonProcessingException {

        var indexRequest = objectMapper.readValue(in, IndexMessage.class);
        fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
        log.info("Failed to process indexing request: {}", indexRequest);
    }

    @RabbitHandler
    @RabbitListener(queues = DELETE_FROM_INDEX_QUEUE)
    public void receiveDeleteDocumentRequest(String in) throws JsonProcessingException {

        var indexRequest = objectMapper.readValue(in, IndexMessage.class);
        log.info("Processing delete document request: {}", indexRequest);
        documentDeleteService.deleteDocument(indexRequest.getFileId());
        log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());

    }

    @RabbitHandler
    @RabbitListener(queues = DELETE_FROM_INDEX_DLQ)
    public void receiveDeleteDocumentRequestDLQ(String in) throws JsonProcessingException {

        var indexRequest = objectMapper.readValue(in, IndexMessage.class);
        fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
        log.info("Failed to process delete from index request: {}", indexRequest);
    }

    private void indexFile(Dossier dossier, FileModel file) {

        fileStatusProcessingUpdateClient.indexing(dossier.getId(), file.getId());
        Text text = textStorageService.getText(dossier.getId(), file.getId());

        var indexDocument = indexDocumentConverterService.convert(dossier.getDossierTemplateId(),
                dossier.getId(),
                file.getId(),
                file.getFilename(),
                text,
                file.getAssignee(),
                dossier.getSoftDeletedTime() != null,
                dossier.getArchivedTime() != null,
                file.getWorkflowStatus(),
                file.getFileAttributes());

        documentIndexService.indexDocument(indexDocument);
        fileStatusProcessingUpdateClient.indexingSuccessful(dossier.getId(), file.getId());
        log.info("Successfully indexed dossier {} file {}", dossier.getId(), file.getId());
    }

    private void addAllDocumentsToIndexQueue() {

        var allDossiers = dossierClient.getAllDossiers(true, true);
        for (Dossier dossier : allDossiers) {
            addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getDossierStatus(dossier.getId()));
            addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getSoftDeletedDossierStatus(dossier.getId()));
        }
        log.info("Successfully added all files from all dossiers to index queue (including archived and deleted)");
    }

    private void addFilesToIndexingQueue(String dossierId, List<FileModel> files) {

        for (FileModel file : files) {
            try {
                log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
                rabbitTemplate.convertAndSend(INDEXING_QUEUE,
                        objectMapper.writeValueAsString(IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build()),
                        message -> {
                            message.getMessageProperties().setPriority(99);
                            return message;
                        });
            } catch (JsonProcessingException e) {
                throw new RuntimeException(e);
            }
        }
    }

}
//package com.iqser.red.service.search.v1.server.queue;
//
//import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
//import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_QUEUE;
//import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_DQL;
//import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_QUEUE;
//
//import java.util.List;
//
//import org.springframework.amqp.AmqpRejectAndDontRequeueException;
//import org.springframework.amqp.core.Message;
//import org.springframework.amqp.rabbit.annotation.RabbitHandler;
//import org.springframework.amqp.rabbit.annotation.RabbitListener;
//import org.springframework.amqp.rabbit.core.RabbitTemplate;
//import org.springframework.stereotype.Service;
//
//import com.fasterxml.jackson.core.JsonProcessingException;
//import com.fasterxml.jackson.databind.ObjectMapper;
//import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
//import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
//import com.iqser.red.service.search.v1.model.IndexMessage;
//import com.iqser.red.service.search.v1.model.IndexMessageType;
//import com.iqser.red.service.search.v1.server.client.DossierClient;
//import com.iqser.red.service.search.v1.server.client.FileStatusClient;
//import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
//import com.iqser.red.service.search.v1.server.model.Text;
//import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
//import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
//import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
//import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
//import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
//import com.iqser.red.service.search.v1.server.service.IndexInformationService;
//import com.iqser.red.service.search.v1.server.service.TextStorageService;
//
//import lombok.RequiredArgsConstructor;
//import lombok.SneakyThrows;
//import lombok.extern.slf4j.Slf4j;
//
//@Slf4j
//@Service
//@RequiredArgsConstructor
//public class IndexingMessageReceiver {
//
//    private final ObjectMapper objectMapper;
//    private final TextStorageService textStorageService;
//    private final FileStatusClient fileStatusClient;
//    private final DossierClient dossierClient;
//    private final FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
//    private final RabbitTemplate rabbitTemplate;
//
//    private final DocumentDeleteService documentDeleteService;
//    private final DocumentUpdateService documentUpdateService;
//    private final DocumentIndexService documentIndexService;
//    private final IndexDeleteService indexDeleteService;
//    private final IndexInformationService indexInformationService;
//    private final IndexDocumentConverterService indexDocumentConverterService;
//
//
//    @SneakyThrows
//    @RabbitHandler
//    @RabbitListener(queues = INDEXING_QUEUE)
//    public void receiveIndexingRequest(Message message) {
//
//        var indexRequest = objectMapper.readValue(message.getBody(), IndexMessage.class);
//
//        // This prevents endless retries on OOM errors.
//        if (message.getMessageProperties().isRedelivered()) {
//            throw new AmqpRejectAndDontRequeueException(String.format("Error during last processing of request with dossierId: %s and fileId: %s, do not retry.",
//                    indexRequest.getDossierId(),
//                    indexRequest.getFileId()));
//        }
//
//        receiveIndexingRequest(indexRequest);
//    }
//
//
//    public void receiveIndexingRequest(IndexMessage indexRequest) {
//
//        log.info("Processing indexing request: {}", indexRequest);
//
//        switch (indexRequest.getMessageType()) {
//            case INSERT:
//                var fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
//                var dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
//                indexFile(dossier, fileStatus);
//                break;
//
//            case UPDATE:
//                fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
//                dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
//
//                var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
//                        dossier.getSoftDeletedTime() != null,
//                        dossier.getArchivedTime() != null,
//                        fileStatus.getWorkflowStatus().name(),
//                        fileStatus.getFileAttributes());
//
//                documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
//                log.info("Successfully updated {}", indexRequest);
//                break;
//
//            case DROP:
//                indexDeleteService.recreateIndex();
//                addAllDocumentsToIndexQueue();
//                try {
//                    indexInformationService.updateIndexInformation();
//                } catch (Exception e) {
//                    log.error("Could not update index information", e);
//                }
//                break;
//
//            default:
//                throw new IllegalArgumentException("MessageType '" + indexRequest.getMessageType() + "' does not exist");
//        }
//
//    }
//
//
//    @RabbitHandler
//    @RabbitListener(queues = INDEXING_DQL)
//    public void receiveIndexingRequestDQL(String in) throws JsonProcessingException {
//
//        var indexRequest = objectMapper.readValue(in, IndexMessage.class);
//        fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
//        log.info("Failed to process indexing request: {}", indexRequest);
//    }
//
//
//    @RabbitHandler
//    @RabbitListener(queues = DELETE_FROM_INDEX_QUEUE)
//    public void receiveDeleteDocumentRequest(String in) throws JsonProcessingException {
//
//        var indexRequest = objectMapper.readValue(in, IndexMessage.class);
//        log.info("Processing delete document request: {}", indexRequest);
//        documentDeleteService.deleteDocument(indexRequest.getFileId());
//        log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
//
//    }
//
//
//    @RabbitHandler
//    @RabbitListener(queues = DELETE_FROM_INDEX_DLQ)
//    public void receiveDeleteDocumentRequestDLQ(String in) throws JsonProcessingException {
//
//        var indexRequest = objectMapper.readValue(in, IndexMessage.class);
//        fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
//        log.info("Failed to process delete from index request: {}", indexRequest);
//    }
//
//
//    private void indexFile(Dossier dossier, FileModel file) {
//
//        fileStatusProcessingUpdateClient.indexing(dossier.getId(), file.getId());
//        Text text = textStorageService.getText(dossier.getId(), file.getId());
//
//        var indexDocument = indexDocumentConverterService.convert(dossier.getDossierTemplateId(),
//                dossier.getId(),
//                file.getId(),
//                file.getFilename(),
//                text,
//                file.getAssignee(),
////                dossier.getSoftDeletedTime() != null,
//                dossier.getArchivedTime() != null,
//                file.getWorkflowStatus(),
//                file.getFileAttributes(), null);
//
//        documentIndexService.indexDocument(indexDocument);
//        fileStatusProcessingUpdateClient.indexingSuccessful(dossier.getId(), file.getId());
//        log.info("Successfully indexed dossier {} file {}", dossier.getId(), file.getId());
//    }
//
//
//    private void addAllDocumentsToIndexQueue() {
//
//        var allDossiers = dossierClient.getAllDossiers(true, true);
//        for (Dossier dossier : allDossiers) {
//            addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getDossierStatus(dossier.getId()));
//            addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getSoftDeletedDossierStatus(dossier.getId()));
//        }
//        log.info("Successfully added all files from all dossiers to index queue (including archived and deleted)");
//    }
//
//
//    private void addFilesToIndexingQueue(String dossierId, List<FileModel> files) {
//
//        for (FileModel file : files) {
//            try {
//                log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
//                rabbitTemplate.convertAndSend(INDEXING_QUEUE,
//                        objectMapper.writeValueAsString(IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build()),
//                        message -> {
//                            message.getMessageProperties().setPriority(99);
//                            return message;
//                        });
//            } catch (JsonProcessingException e) {
//                throw new RuntimeException(e);
//            }
//        }
//    }
//
//}
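Both sides of this file exchange plain JSON-serialized IndexMessage payloads over the queues declared in MessagingConfiguration. A minimal producer sketch for the delete queue, which receiveDeleteDocumentRequest above consumes but nothing in this diff publishes to — the helper is hypothetical and not part of the diff; the handler only reads dossierId and fileId from the payload:

```java
import org.springframework.amqp.rabbit.core.RabbitTemplate;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.server.queue.MessagingConfiguration;

class DeleteFromIndexProducerSketch {

    // Hypothetical helper, not part of the diff: enqueues a request that
    // receiveDeleteDocumentRequest would pick up from DELETE_FROM_INDEX_QUEUE.
    static void enqueueDelete(RabbitTemplate rabbitTemplate, ObjectMapper objectMapper,
            String dossierId, String fileId) throws JsonProcessingException {
        String payload = objectMapper.writeValueAsString(IndexMessage.builder()
                .dossierId(dossierId)
                .fileId(fileId)
                .build());
        rabbitTemplate.convertAndSend(MessagingConfiguration.DELETE_FROM_INDEX_QUEUE, payload);
    }
}
```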
@@ -1,54 +1,36 @@
package com.iqser.red.service.search.v1.server.service;

import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.Set;

import org.springframework.stereotype.Service;

import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
import com.iqser.red.service.search.v1.server.model.IndexSection;
import com.iqser.red.service.search.v1.server.model.SectionArea;
import com.iqser.red.service.search.v1.server.model.SectionText;
import com.iqser.red.service.search.v1.server.model.Text;

@Service
public class IndexDocumentConverterService {

    public IndexDocument convert(String dossierTemplateId,
            String dossierId,
            String fileId,
            String filename,
            Text text,
            String assignee,
            boolean deleted,
            boolean archived,
            WorkflowStatus workflowStatus,
            Map<String, String> fileAttributes) {
    public IndexDocument convert(String fileId, String filename, int sectionNr, Set<Integer> pages, String headline, String text, Map<String, String> fileAttributes, Float[] embeddingsVector) {

        return IndexDocument.builder()
                .dossierTemplateId(dossierTemplateId)
                .dossierId(dossierId)
                .fileId(fileId)
                .filename(filename)
                .sections(text.getSectionTexts().stream().map(this::convert).collect(Collectors.toList()))
                .date(Date.from(OffsetDateTime.now().toInstant()))
                .sectionNumber(sectionNr)
                .pages(pages)
                .headline(headline)
                .text(text)
                .fileAttributes(convertFileAttributes(fileAttributes))
                .assignee(assignee)
                .dossierDeleted(deleted)
                .dossierArchived(archived)
                .workflowStatus(workflowStatus.name())
                .wordEmbeddingsVector(embeddingsVector)
                .build();
    }

    public IndexDocumentUpdate convertUpdateDocument(String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {

        return IndexDocumentUpdate.builder()
                .assignee(assignee)
                .dossierDeleted(deleted)
@@ -68,17 +50,4 @@ public class IndexDocumentConverterService {
        return converted;
    }

    private IndexSection convert(SectionText sectionText) {

        return IndexSection.builder()
                .sectionNumber(sectionText.getSectionNumber())
                .text(sectionText.getText())
                .pages(sectionText.getSectionAreas().stream().map(SectionArea::getPage).collect(Collectors.toSet()))
                .headline(sectionText.getHeadline())
                .build();
    }

}
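The hunks above replace the per-file convert (dossier metadata plus the whole Text) with a per-section signature; the old and new signatures and builder lines appear adjacently in the diff. A sketch of calling the new signature, one invocation per section; all values are illustrative:

```java
import java.util.Map;
import java.util.Set;

class ConverterCallSketch {

    public static void main(String[] args) {
        IndexDocumentConverterService converter = new IndexDocumentConverterService();

        // One IndexDocument per section under the new signature (values illustrative).
        IndexDocument sectionDoc = converter.convert("file-1",
                "contract.pdf",
                0,
                Set.of(1, 2),
                "Introduction",
                "First section text.",
                Map.of("customer", "ACME"),
                new Float[383]);

        System.out.println(sectionDoc.getHeadline());
    }
}
```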
@@ -33,7 +33,7 @@ public class DocumentIndexServiceImpl implements DocumentIndexService {
        try {
            clientCache.getClient()
                    .index(i -> i.index(TenantContext.getTenantId())
                            .id(indexDocument.getFileId())
                            .id(indexDocument.getFileId() + "_" + indexDocument.getSectionNumber())
                            .refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
                            .document(indexDocument));
        } catch (IOException | ElasticsearchException e) {
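This one-line change swaps the document id from the bare fileId to fileId_sectionNumber, so each section is indexed as its own Elasticsearch document. The id composition spelled out, with illustrative values:

```java
class DocumentIdSketch {

    // Mirrors the id composition in the hunk above.
    static String documentId(String fileId, int sectionNumber) {
        return fileId + "_" + sectionNumber;
    }

    public static void main(String[] args) {
        System.out.println(documentId("file-1", 0)); // file-1_0
    }
}
```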
@@ -1,334 +1,335 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.model.MatchedDocument;
import com.iqser.red.service.search.v1.model.MatchedSection;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.Query;
import com.iqser.red.service.search.v1.server.service.SearchService;
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;

import co.elastic.clients.elasticsearch._types.FieldValue;
import co.elastic.clients.elasticsearch._types.query_dsl.ChildScoreMode;
import co.elastic.clients.elasticsearch._types.query_dsl.QueryBuilders;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import co.elastic.clients.elasticsearch.core.search.HighlightField;
import co.elastic.clients.elasticsearch.core.search.HighlighterType;
import co.elastic.clients.elasticsearch.core.search.Hit;
import co.elastic.clients.elasticsearch.core.search.InnerHitsResult;
import co.elastic.clients.json.JsonData;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.instrument.util.StringUtils;
import jakarta.json.JsonObject;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class SearchServiceImpl implements SearchService {

    private final EsClientCache clientCache;


    @Timed("redactmanager_search")
    public SearchResult search(String queryString,
            List<String> dossierTemplateIds,
            List<String> dossierIds,
            String fileId,
            String assignee,
            boolean includeDeletedDossiers,
            boolean includeArchivedDossiers,
            String workflowStatus,
            Map<String, String> fileAttributes,
            int page,
            int pageSize,
            boolean returnSections) {

        Query query = QueryStringConverter.convert(queryString);

        Map<String, HighlightField> highlightFieldMap = new HashMap<>();
        highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
        highlightFieldMap.put("filename", new HighlightField.Builder().build());
        highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());

        SearchRequest request = new SearchRequest.Builder().query(convertQuery(query,
                dossierTemplateIds,
                dossierIds,
                fileId,
                assignee,
                includeDeletedDossiers,
                includeArchivedDossiers,
                workflowStatus,
                fileAttributes,
                returnSections))
                .from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
                .size(getPageSizeOrDefault(pageSize))
                .source(s -> s.filter(f -> f.includes("dossierId",
                        "dossierTemplateId",
                        "dossierDeleted",
                        "dossierArchived",
                        "filename",
                        "fileId",
                        "assignee",
                        "dossierStatus",
                        "workflowStatus",
                        "fileAttributes")))
                .highlight(h -> h.type(HighlighterType.FastVector).fields(highlightFieldMap))
                .trackScores(true)
                .build();

        SearchResponse response = execute(request);

        return convert(response, query);
    }


    protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {

        try {
            return clientCache.getClient().search(searchRequest, IndexDocument.class);
        } catch (IOException e) {
            throw IndexException.searchFailed(e);
        }
    }


    private co.elastic.clients.elasticsearch._types.query_dsl.Query convertQuery(Query query,
            List<String> dossierTemplateIds,
            List<String> dossierIds,
            String fileId,
            String assignee,
            boolean includeDeletedDossiers,
            boolean includeArchivedDossiers,
            String workflowStatus,
            Map<String, String> fileAttributes,
            boolean returnSections) {

        var entireQuery = QueryBuilders.bool();
        var sectionsQueries = QueryBuilders.bool();

        for (String must : query.getMusts()) {

            var textPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must));
            var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must));
            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
                    .query(must.toLowerCase(Locale.ROOT))
                    .queryName("fileAttributes." + must));

            var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
            entireQuery.must(filenameOrTextMustQuery);
            sectionsQueries.should(textPhraseQuery);
        }
        for (String should : query.getShoulds()) {

            var textTermQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should));
            var filenameTermQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should));
            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
                    .query(should.toLowerCase(Locale.ROOT))
                    .queryName("fileAttributes." + should));
            entireQuery.should(textTermQuery);
            entireQuery.should(filenameTermQuery);
            entireQuery.should(fileAttributesPhraseQuery);
            sectionsQueries.should(textTermQuery);
        }

        if (returnSections) {
            var nestedQuery = QueryBuilders.nested(n -> n.scoreMode(ChildScoreMode.Avg)
                    .queryName("sections")
                    .query(sectionsQueries.build()._toQuery())
                    .path("sections")
                    .innerHits(i -> i.size(100)));
            entireQuery.should(nestedQuery);
        }

        var filterQuery = QueryBuilders.bool();

        if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {

            var dossierTemplateIdQueryBuilder = QueryBuilders.bool();

            for (var dossierTemplateId : dossierTemplateIds) {
                if (StringUtils.isNotEmpty(dossierTemplateId)) {
                    dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierTemplateId").query(dossierTemplateId)));
                }
            }

            filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
        }

        if (dossierIds != null && !dossierIds.isEmpty()) {

            var dossierIdQueryBuilder = QueryBuilders.bool();

            for (var dossierId : dossierIds) {
                if (StringUtils.isNotEmpty(dossierId)) {
                    dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierId").query(dossierId)));
                }
            }

            filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
        }

        if (StringUtils.isNotEmpty(fileId)) {
            filterQuery.must(QueryBuilders.match(q -> q.field("fileId").query(fileId)));
        }

        if (StringUtils.isNotEmpty(assignee)) {
            filterQuery.must(QueryBuilders.match(q -> q.field("assignee").query(assignee)));
        }

        if (includeArchivedDossiers) {
            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived")
                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
        } else {
            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
        }

        if (includeDeletedDossiers) {
            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted")
                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
        } else {
            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
        }

        if (StringUtils.isNotEmpty(workflowStatus)) {
            filterQuery.must(QueryBuilders.match(q -> q.field("workflowStatus").query(workflowStatus)));
        }

        if (fileAttributes != null && !fileAttributes.isEmpty()) {
            var fileAttributesQueryBuilder = QueryBuilders.bool();

            for (var fileAttributeKey : fileAttributes.keySet()) {
                if (StringUtils.isNotEmpty(fileAttributeKey)) {
                    fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
                            .must(QueryBuilders.match(q -> q.field("fileAttributes.name").query(fileAttributeKey)))
                            .must(QueryBuilders.match(q -> q.field("fileAttributes.value").query(fileAttributes.get(fileAttributeKey))))
                            .build()
                            ._toQuery()));
                }
            }

            filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
        }

        return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
    }


    private SearchResult convert(SearchResponse response, Query query) {

        List<Hit> hits = response.hits().hits();

        return SearchResult.builder()
                .matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
                .maxScore(response.hits().maxScore().floatValue())
                .total(response.hits().total().value())
                .build();
    }


    private MatchedDocument convertSearchHit(Hit hit, Query query) {

        List<String> m = hit.matchedQueries();

        Set<String> matchesTerms = m.stream()
                .map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
                .map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
                .collect(Collectors.toSet());

        Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());

        IndexDocument indexDocument = (IndexDocument) hit.source();

        MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
                .score(hit.score().floatValue())
                .dossierId(indexDocument.getDossierId())
                .dossierTemplateId(indexDocument.getDossierTemplateId())
                .fileId(indexDocument.getFileId())
                .assignee(indexDocument.getAssignee())
                .fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
                .workflowStatus(indexDocument.getWorkflowStatus())
                .fileName(indexDocument.getFilename())
                .dossierDeleted(indexDocument.isDossierDeleted())
                .dossierArchived(indexDocument.isDossierArchived())
                .highlights(hit.highlight())
                .matchedTerms(matchesTerms)
                .unmatchedTerms(unmatchedTerms);

        if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
            InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
            matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
                    .containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
        }

        return matchedDocumentBuilder.build();

    }


    private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {

        Map<String, String> fileAttributes = new HashMap<>();

        if (fileAttributesSourceMap != null) {
            List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
            list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
        }

        return fileAttributes;
    }


    private MatchedSection convertInnerHit(Hit<JsonData> hit) {

        JsonObject indexSection = hit.source().toJson().asJsonObject();

        var jsonArray = indexSection.getJsonArray("pages");
        var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());

        return MatchedSection.builder()
                .headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
                .sectionNumber(indexSection.getInt("sectionNumber"))
                .pages(pages)
                .matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
                .build();
    }


    private int getPageSizeOrDefault(int pageSize) {

        if (pageSize <= 0) {
            return 10;
        }
        return pageSize;
    }


    private int getPageOrDefault(int page) {

        if (page < 0) {
            return 0;
        }
        return page;
    }

}
//package com.iqser.red.service.search.v1.server.service.elasticsearch;
//
//import java.io.IOException;
//import java.util.ArrayList;
//import java.util.HashMap;
//import java.util.List;
//import java.util.Locale;
//import java.util.Map;
//import java.util.Set;
//import java.util.stream.Collectors;
//import java.util.stream.IntStream;
//import java.util.stream.Stream;
//
//import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
//import org.springframework.stereotype.Service;
//
//import com.fasterxml.jackson.databind.ObjectMapper;
//import com.iqser.red.service.search.v1.model.MatchedDocument;
//import com.iqser.red.service.search.v1.model.MatchedSection;
//import com.iqser.red.service.search.v1.model.SearchResult;
//import com.iqser.red.service.search.v1.server.exception.IndexException;
//import com.iqser.red.service.search.v1.server.model.IndexDocument;
//import com.iqser.red.service.search.v1.server.model.Query;
//import com.iqser.red.service.search.v1.server.service.SearchService;
//import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
//
//import co.elastic.clients.elasticsearch._types.FieldValue;
//import co.elastic.clients.elasticsearch._types.query_dsl.ChildScoreMode;
//import co.elastic.clients.elasticsearch._types.query_dsl.QueryBuilders;
//import co.elastic.clients.elasticsearch.core.SearchRequest;
//import co.elastic.clients.elasticsearch.core.SearchResponse;
//import co.elastic.clients.elasticsearch.core.search.HighlightField;
//import co.elastic.clients.elasticsearch.core.search.HighlighterType;
//import co.elastic.clients.elasticsearch.core.search.Hit;
//import co.elastic.clients.elasticsearch.core.search.InnerHitsResult;
//import co.elastic.clients.json.JsonData;
//import io.micrometer.core.annotation.Timed;
//import io.micrometer.core.instrument.util.StringUtils;
//import jakarta.json.JsonObject;
//import lombok.RequiredArgsConstructor;
//import lombok.extern.slf4j.Slf4j;
//
//@Slf4j
//@Service
//@RequiredArgsConstructor
//@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
//public class SearchServiceImpl implements SearchService {
//
//    private final EsClientCache clientCache;
//
//
//
//    @Timed("redactmanager_search")
//    public SearchResult search(String queryString,
//            List<String> dossierTemplateIds,
//            List<String> dossierIds,
//            String fileId,
//            String assignee,
//            boolean includeDeletedDossiers,
//            boolean includeArchivedDossiers,
//            String workflowStatus,
//            Map<String, String> fileAttributes,
//            int page,
//            int pageSize,
//            boolean returnSections) {
//
//        Query query = QueryStringConverter.convert(queryString);
//
//        Map<String, HighlightField> highlightFieldMap = new HashMap<>();
//        highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
//        highlightFieldMap.put("filename", new HighlightField.Builder().build());
//        highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());
//
//        SearchRequest request = new SearchRequest.Builder().query(convertQuery(query,
//                dossierTemplateIds,
//                dossierIds,
//                fileId,
//                assignee,
//                includeDeletedDossiers,
//                includeArchivedDossiers,
//                workflowStatus,
//                fileAttributes,
//                returnSections))
//                .from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
//                .size(getPageSizeOrDefault(pageSize))
//                .source(s -> s.filter(f -> f.includes("dossierId",
//                        "dossierTemplateId",
//                        "dossierDeleted",
//                        "dossierArchived",
//                        "filename",
//                        "fileId",
//                        "assignee",
//                        "dossierStatus",
//                        "workflowStatus",
//                        "fileAttributes")))
//                .highlight(h -> h.type(HighlighterType.FastVector).fields(highlightFieldMap))
//                .trackScores(true)
//                .build();
//
//        SearchResponse response = execute(request);
//
//        return convert(response, query);
//    }
//
//
//    protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {
//
//        try {
//            return clientCache.getClient().search(searchRequest, IndexDocument.class);
//        } catch (IOException e) {
//            throw IndexException.searchFailed(e);
//        }
//    }
//
//
//    private co.elastic.clients.elasticsearch._types.query_dsl.Query convertQuery(Query query,
//            List<String> dossierTemplateIds,
//            List<String> dossierIds,
//            String fileId,
//            String assignee,
//            boolean includeDeletedDossiers,
//            boolean includeArchivedDossiers,
//            String workflowStatus,
//            Map<String, String> fileAttributes,
//            boolean returnSections) {
//
//        var entireQuery = QueryBuilders.bool();
//        var sectionsQueries = QueryBuilders.bool();
//
//        for (String must : query.getMusts()) {
//
//            var textPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must));
//            var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must));
//            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
//                    .query(must.toLowerCase(Locale.ROOT))
//                    .queryName("fileAttributes." + must));
//
//            var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
//            entireQuery.must(filenameOrTextMustQuery);
//            sectionsQueries.should(textPhraseQuery);
//        }
//        for (String should : query.getShoulds()) {
//
//            var textTermQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should));
//            var filenameTermQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should));
//            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
//                    .query(should.toLowerCase(Locale.ROOT))
//                    .queryName("fileAttributes." + should));
//            entireQuery.should(textTermQuery);
//            entireQuery.should(filenameTermQuery);
//            entireQuery.should(fileAttributesPhraseQuery);
//            sectionsQueries.should(textTermQuery);
//        }
//
//        if (returnSections) {
//            var nestedQuery = QueryBuilders.nested(n -> n.scoreMode(ChildScoreMode.Avg)
//                    .queryName("sections")
//                    .query(sectionsQueries.build()._toQuery())
//                    .path("sections")
//                    .innerHits(i -> i.size(100)));
//            entireQuery.should(nestedQuery);
//        }
//
//        var filterQuery = QueryBuilders.bool();
//
//        if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {
//
//            var dossierTemplateIdQueryBuilder = QueryBuilders.bool();
//
//            for (var dossierTemplateId : dossierTemplateIds) {
//                if (StringUtils.isNotEmpty(dossierTemplateId)) {
//                    dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierTemplateId").query(dossierTemplateId)));
//                }
//            }
//
//            filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
//        }
//
//        if (dossierIds != null && !dossierIds.isEmpty()) {
//
//            var dossierIdQueryBuilder = QueryBuilders.bool();
//
//            for (var dossierId : dossierIds) {
//                if (StringUtils.isNotEmpty(dossierId)) {
//                    dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierId").query(dossierId)));
//                }
//            }
//
//            filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
//        }
//
//        if (StringUtils.isNotEmpty(fileId)) {
//            filterQuery.must(QueryBuilders.match(q -> q.field("fileId").query(fileId)));
//        }
//
//        if (StringUtils.isNotEmpty(assignee)) {
//            filterQuery.must(QueryBuilders.match(q -> q.field("assignee").query(assignee)));
//        }
//
//        if (includeArchivedDossiers) {
//            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived")
//                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
//        } else {
//            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
//        }
//
//        if (includeDeletedDossiers) {
//            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted")
//                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
//        } else {
//            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
//        }
//
//        if (StringUtils.isNotEmpty(workflowStatus)) {
//            filterQuery.must(QueryBuilders.match(q -> q.field("workflowStatus").query(workflowStatus)));
//        }
//
//        if (fileAttributes != null && !fileAttributes.isEmpty()) {
//            var fileAttributesQueryBuilder = QueryBuilders.bool();
//
//            for (var fileAttributeKey : fileAttributes.keySet()) {
//                if (StringUtils.isNotEmpty(fileAttributeKey)) {
//                    fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
//                            .must(QueryBuilders.match(q -> q.field("fileAttributes.name").query(fileAttributeKey)))
//                            .must(QueryBuilders.match(q -> q.field("fileAttributes.value").query(fileAttributes.get(fileAttributeKey))))
//                            .build()
//                            ._toQuery()));
//                }
//            }
//
//            filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
//        }
//
//        return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
//    }
//
//
//    private SearchResult convert(SearchResponse response, Query query) {
//
//        List<Hit> hits = response.hits().hits();
//
//        return SearchResult.builder()
//                .matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
//                .maxScore(response.hits().maxScore().floatValue())
//                .total(response.hits().total().value())
//                .build();
//    }
//
//
//    private MatchedDocument convertSearchHit(Hit hit, Query query) {
//
//        List<String> m = hit.matchedQueries();
//
//        Set<String> matchesTerms = m.stream()
//                .map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
//                .map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
//                .collect(Collectors.toSet());
//
//        Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());
//
//        IndexDocument indexDocument = (IndexDocument) hit.source();
//
//        MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
//                .score(hit.score().floatValue())
//                .dossierId(indexDocument.getDossierId())
//                .dossierTemplateId(indexDocument.getDossierTemplateId())
//                .fileId(indexDocument.getFileId())
//                .assignee(indexDocument.getAssignee())
//                .fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
//                .workflowStatus(indexDocument.getWorkflowStatus())
//                .fileName(indexDocument.getFilename())
//                .dossierDeleted(indexDocument.isDossierDeleted())
//                .dossierArchived(indexDocument.isDossierArchived())
//                .highlights(hit.highlight())
//                .matchedTerms(matchesTerms)
//                .unmatchedTerms(unmatchedTerms);
//
//        if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
//            InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
//            matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
//                    .containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
//        }
//
//        return matchedDocumentBuilder.build();
//
//    }
//
//
//    private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {
//
//        Map<String, String> fileAttributes = new HashMap<>();
//
//        if (fileAttributesSourceMap != null) {
//            List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
//            list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
//        }
//
//        return fileAttributes;
//    }
//
//
//    private MatchedSection convertInnerHit(Hit<JsonData> hit) {
//
//        JsonObject indexSection = hit.source().toJson().asJsonObject();
//
//        var jsonArray = indexSection.getJsonArray("pages");
//        var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());
//
//        return MatchedSection.builder()
//                .headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
//                .sectionNumber(indexSection.getInt("sectionNumber"))
//                .pages(pages)
//                .matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
//                .build();
//    }
//
//
//    private int getPageSizeOrDefault(int pageSize) {
//
//        if (pageSize <= 0) {
//            return 10;
//        }
//        return pageSize;
//    }
//
//
//    private int getPageOrDefault(int page) {
//
//        if (page < 0) {
//            return 0;
//        }
//        return page;
//    }
//
//}
@ -1,40 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.io.IOException;

import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.opensearch.client.opensearch.core.DeleteRequest;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;

import lombok.RequiredArgsConstructor;

@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentDeleteServiceImpl implements DocumentDeleteService {

    private final OpensearchClientCache clientCache;
    private final ElasticsearchSettings settings;

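    // Each tenant lives in its own index named after the tenant ID, so the delete addresses
    // TenantContext.getTenantId() directly. The refresh behaviour is configuration-driven:
    // Refresh._DESERIALIZER.parse(...) maps the configured string ("true", "false" or
    // "wait_for") onto the client's Refresh enum.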
    public void deleteDocument(String fileId) {

        DeleteRequest request = new DeleteRequest.Builder().index(TenantContext.getTenantId()).id(fileId).refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())).build();

        try {
            clientCache.getClient().delete(request);
        } catch (IOException | OpenSearchException e) {
            throw IndexException.documentDeleteError(fileId, e);
        }
    }

}

@ -1,43 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.io.IOException;

import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;

import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentIndexServiceImpl implements DocumentIndexService {

    private final OpensearchClientCache clientCache;
    private final ElasticsearchSettings settings;

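    // Documents are indexed under their file ID, so re-indexing the same file replaces the
    // previous version instead of creating a duplicate. @Timed publishes the call duration
    // as the "redactmanager_indexDocument" metric via Micrometer.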
    @Timed("redactmanager_indexDocument")
    public void indexDocument(IndexDocument indexDocument) {

        try {
            clientCache.getClient().index(i -> i.index(TenantContext.getTenantId())
                    .id(indexDocument.getFileId())
                    .refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
                    .document(indexDocument));
        } catch (IOException | OpenSearchException e) {
            throw IndexException.documentIndexError(indexDocument.getFileId(), e);
        }
    }

}
@ -1,42 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.io.IOException;

import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;

import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;

@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentUpdateServiceImpl implements DocumentUpdateService {

    private final OpensearchClientCache clientCache;
    private final ElasticsearchSettings settings;

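    // Partial update: doc(indexDocumentUpdate) merges only the fields present in
    // IndexDocumentUpdate into the stored document; everything else (notably the indexed
    // section texts) is left untouched rather than being replaced wholesale.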
    @SneakyThrows
    @Timed("redactmanager_updateDocument")
    public void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate) {

        try {
            clientCache.getClient()
                    .update(u -> u.index(TenantContext.getTenantId()).id(fileId).doc(indexDocumentUpdate).refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())),
                            IndexDocumentUpdate.class);
        } catch (IOException | OpenSearchException e) {
            throw IndexException.documentUpdateError(fileId, e);
        }
    }

}
@ -1,97 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

import org.opensearch.client.json.JsonpMapper;
import org.opensearch.client.opensearch._types.mapping.TypeMapping;
import org.opensearch.client.opensearch.indices.IndexSettings;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.core.io.ResourceLoader;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;

import jakarta.json.stream.JsonParser;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class IndexCreatorServiceImpl {

    private final ElasticsearchSettings settings;


    public void createIndex(OpensearchClient client) {

        if (!indexExists(client)) {

            try {
                var response = client.indices().create(i -> i.index(TenantContext.getTenantId()).settings(createIndexSettings(client)).mappings(createIndexMapping(client)));
                log.info("Successfully created index: {}", response.index());
            } catch (IOException e) {
                log.error("Failed to create index.", e);
            }
        }
    }


    private boolean indexExists(OpensearchClient client) {

        try {
            var response = client.indices().exists(i -> i.index(TenantContext.getTenantId()));
            return response.value();
        } catch (IOException e) {
            throw IndexException.indexExists(e);
        }
    }


    @SneakyThrows
    private TypeMapping createIndexMapping(OpensearchClient client) {

        URL resource = ResourceLoader.class.getClassLoader().getResource("index/mapping.json");

        try (InputStream is = resource.openStream()) {

            JsonpMapper mapper = client._transport().jsonpMapper();
            JsonParser parser = mapper.jsonProvider().createParser(is);

            return TypeMapping._DESERIALIZER.deserialize(parser, mapper);
        }
    }


    @SneakyThrows
    private IndexSettings createIndexSettings(OpensearchClient client) {

        URL resource = ResourceLoader.class.getClassLoader().getResource("index/settings.json");

        try (InputStream is = resource.openStream()) {

            JsonpMapper mapper = client._transport().jsonpMapper();
            JsonParser parser = mapper.jsonProvider().createParser(is);

            var indexSettingsFromJson = IndexSettings._DESERIALIZER.deserialize(parser, mapper);

            // It is not possible to set "index.mapping.nested_objects.limit"; OpenSearch does not seem to expose this parameter.
            // Hopefully OpenSearch has no such limit; I was not able to find anything about one.
            // Elasticsearch does have this limit, and since we cannot set it here, that is currently the only reason to keep both clients.
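            // For reference, on the Elasticsearch side the limit would be raised in the index
            // settings JSON, roughly along these lines (illustrative value, not taken from
            // this repository):
            //
            //   { "index": { "mapping": { "nested_objects": { "limit": 50000 } } } }
            //
            // Each file is indexed as one document with a nested object per section, so very
            // large files can exceed Elasticsearch's default of 10000 nested objects.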
            var indexSettings = new IndexSettings.Builder().index(indexSettingsFromJson.index())
                    .numberOfReplicas(client.getSearchConnection().getNumberOfReplicas())
                    .numberOfShards(client.getSearchConnection().getNumberOfShards())
                    .analysis(indexSettingsFromJson.analysis())
                    .build();

            return indexSettings;
        }
    }

}
@ -1,57 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;

import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class IndexDeleteServiceImpl implements IndexDeleteService {

    private final OpensearchClientCache clientCache;
    private final IndexCreatorServiceImpl indexCreatorService;

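    // Recreation runs close -> drop -> create: closing first quiesces the index before it
    // is deleted, and both steps use a 2m timeout and treat a missing acknowledgement as
    // a hard failure.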
    public void recreateIndex() {

        closeIndex();
        dropIndex();
        indexCreatorService.createIndex(clientCache.getClient());
    }


    @SneakyThrows
    public void closeIndex() {

        var closeIndexResponse = clientCache.getClient().indices().close(i -> i.index(TenantContext.getTenantId()).timeout(t -> t.time("2m")));
        if (closeIndexResponse.acknowledged()) {
            log.info("Index is closed");
        } else {
            throw new IndexException("Error while closing index");
        }
    }


    @SneakyThrows
    public void dropIndex() {

        log.info("Will drop index");
        var deleteIndexResponse = clientCache.getClient().indices().delete(i -> i.index(TenantContext.getTenantId()).timeout(t -> t.time("2m")));

        if (deleteIndexResponse.acknowledged()) {
            log.info("Index is dropped");
        } else {
            throw new IndexException("Error while dropping index");
        }
    }

}
@ -1,65 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.util.stream.Collectors;

import javax.annotation.PreDestroy;

import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestClientBuilder;
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.transport.rest_client.RestClientTransport;

import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;

import lombok.Data;
import lombok.experimental.Delegate;

@Data
public class OpensearchClient {

    // Lower timeouts should be set per request.
    private static final int ABSURD_HIGH_TIMEOUT = 90_000_000;
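    // Connect/socket timeouts for Apache HttpClient are given in milliseconds, so
    // 90_000_000 ms is 90,000 s, i.e. 25 hours; effectively "no client-wide timeout".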

    private SearchConnection searchConnection;

    @Delegate
    private OpenSearchClient client;


    public OpensearchClient(SearchConnection searchConnection) {

        HttpHost[] httpHost = searchConnection.getHosts()
                .stream()
                .map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
                .collect(Collectors.toList())
                .toArray(new HttpHost[searchConnection.getHosts().size()]);

        RestClientBuilder builder = RestClient.builder(httpHost)
                .setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT).setSocketTimeout(ABSURD_HIGH_TIMEOUT));

        if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
            final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
            credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(searchConnection.getUsername(), searchConnection.getPassword()));
            builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
        }

        var transport = new RestClientTransport(builder.build(), new JacksonJsonpMapper());

        this.searchConnection = searchConnection;
        this.client = new OpenSearchClient(transport);
    }


    @PreDestroy
    public void onShutdown() {

        client.shutdown();
    }

}
@ -1,86 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.iqser.red.service.search.v1.server.client.TenantsClient;
import com.iqser.red.service.search.v1.server.model.Connection;
import com.iqser.red.service.search.v1.server.multitenancy.EncryptionDecryptionService;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;

import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class OpensearchClientCache {

    private final TenantsClient tenantsClient;
    private final EncryptionDecryptionService encryptionDecryptionService;
    private final IndexCreatorServiceImpl indexCreatorService;

    @Value("${multitenancy.client-cache.maximumSize:100}")
    private Long maximumSize;

    @Value("${multitenancy.client-cache.expireAfterAccess:10}")
    private Integer expireAfterAccess;

    private LoadingCache<String, Connection> connections;
    private LoadingCache<Connection, OpensearchClient> clients;


    @PostConstruct
    protected void createCache() {

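        // Two-level cache: tenantId -> Connection (resolved via the tenants service) and
        // Connection -> OpensearchClient. Keying clients by Connection means a changed
        // connection yields a fresh client, and (assuming Connection has value-based
        // equality) tenants sharing the same cluster coordinates can reuse one client.
        // Evicted clients are shut down by the removal listener below.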
        connections = CacheBuilder.newBuilder().maximumSize(maximumSize).expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES).build(new CacheLoader<>() {
            public Connection load(String tenantId) {

                var tenant = tenantsClient.getTenant(tenantId);
                var hostsAsString = tenant.getSearchConnection().getHosts().stream().collect(Collectors.joining());
                return Connection.builder().hosts(hostsAsString).searchConnection(tenant.getSearchConnection()).build();
            }
        });

        clients = CacheBuilder.newBuilder()
                .maximumSize(maximumSize)
                .expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
                .removalListener((RemovalListener<Connection, OpensearchClient>) removal -> {
                    removal.getValue().shutdown();
                    log.info("Closed opensearch client for tenant {}", removal.getKey().getHosts());
                })
                .build(new CacheLoader<>() {
                    public OpensearchClient load(Connection key) {

                        if (key.getSearchConnection().getPassword() != null) {
                            key.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(key.getSearchConnection().getPassword()));
                        }
                        var client = new OpensearchClient(key.getSearchConnection());
                        log.info("Initialized opensearch client for tenant {}", key);
                        indexCreatorService.createIndex(client);
                        return client;
                    }
                });
    }


    @SneakyThrows
    public OpensearchClient getClient() {

        var connection = connections.get(TenantContext.getTenantId());
        return clients.get(connection);
    }

}
@ -1,354 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;

import org.opensearch.client.json.JsonData;
import org.opensearch.client.opensearch._types.FieldValue;
import org.opensearch.client.opensearch._types.query_dsl.ChildScoreMode;
import org.opensearch.client.opensearch._types.query_dsl.QueryBuilders;
import org.opensearch.client.opensearch.core.SearchRequest;
import org.opensearch.client.opensearch.core.SearchResponse;
import org.opensearch.client.opensearch.core.search.BuiltinHighlighterType;
import org.opensearch.client.opensearch.core.search.HighlightField;
import org.opensearch.client.opensearch.core.search.HighlighterType;
import org.opensearch.client.opensearch.core.search.Hit;
import org.opensearch.client.opensearch.core.search.InnerHitsResult;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.model.MatchedDocument;
import com.iqser.red.service.search.v1.model.MatchedSection;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.Query;
import com.iqser.red.service.search.v1.server.service.SearchService;
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;

import io.micrometer.core.annotation.Timed;
import io.micrometer.core.instrument.util.StringUtils;
import jakarta.json.JsonObject;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class SearchServiceImpl implements SearchService {

    private final OpensearchClientCache clientCache;


    @Timed("redactmanager_search")
    public SearchResult search(String queryString,
            List<String> dossierTemplateIds,
            List<String> dossierIds,
            String fileId,
            String assignee,
            boolean includeDeletedDossiers,
            boolean includeArchivedDossiers,
            String workflowStatus,
            Map<String, String> fileAttributes,
            int page,
            int pageSize,
            boolean returnSections) {

        Query query = QueryStringConverter.convert(queryString);

        Map<String, HighlightField> highlightFieldMap = new HashMap<>();
        highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
        highlightFieldMap.put("filename", new HighlightField.Builder().build());
        highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());

        SearchRequest request = new SearchRequest.Builder().query(convertQuery(query,
                dossierTemplateIds,
                dossierIds,
                fileId,
                assignee,
                includeDeletedDossiers,
                includeArchivedDossiers,
                workflowStatus,
                fileAttributes,
                returnSections))
                .from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
                .size(getPageSizeOrDefault(pageSize))
                .source(s -> s.filter(f -> f.includes("dossierId",
                        "dossierTemplateId",
                        "dossierDeleted",
                        "dossierArchived",
                        "filename",
                        "fileId",
                        "assignee",
                        "dossierStatus",
                        "workflowStatus",
                        "fileAttributes")))
                .highlight(h -> h.type(HighlighterType.of(b -> b.builtin(BuiltinHighlighterType.FastVector))).fields(highlightFieldMap))
                .trackScores(true)
                .build();

        SearchResponse response = execute(request);

        return convert(response, query);
    }


    protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {

        try {
            return clientCache.getClient().search(searchRequest, IndexDocument.class);
        } catch (IOException e) {
            throw IndexException.searchFailed(e);
        }
    }


    private org.opensearch.client.opensearch._types.query_dsl.Query convertQuery(Query query,
            List<String> dossierTemplateIds,
            List<String> dossierIds,
            String fileId,
            String assignee,
            boolean includeDeletedDossiers,
            boolean includeArchivedDossiers,
            String workflowStatus,
            Map<String, String> fileAttributes,
            boolean returnSections) {

        var entireQuery = QueryBuilders.bool();
        var sectionsQueries = QueryBuilders.bool();

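        // Shape of the generated query: each "must" term has to match at least one of
        // sections.text, filename or fileAttributes.value (a bool "should" wrapped in a
        // "must"), while plain "should" terms only boost the score. Every clause carries a
        // queryName ("<term>", "filename.<term>" or "fileAttributes.<term>") so that
        // matchedQueries() on the hits can be mapped back to the original search terms.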
        for (String must : query.getMusts()) {

            var textPhraseQuery = QueryBuilders.matchPhrase().field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must).build()._toQuery();
            var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix().field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must).build()._toQuery();
            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase()
                    .field("fileAttributes.value")
                    .query(must.toLowerCase(Locale.ROOT))
                    .queryName("fileAttributes." + must)
                    .build()
                    ._toQuery();

            var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
            entireQuery.must(filenameOrTextMustQuery);
            sectionsQueries.should(textPhraseQuery);
        }
        for (String should : query.getShoulds()) {

            var textTermQuery = QueryBuilders.matchPhrase().field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should).build()._toQuery();
            var filenameTermQuery = QueryBuilders.matchPhrasePrefix().field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should).build()._toQuery();
            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase()
                    .field("fileAttributes.value")
                    .query(should.toLowerCase(Locale.ROOT))
                    .queryName("fileAttributes." + should)
                    .build()
                    ._toQuery();
            entireQuery.should(textTermQuery);
            entireQuery.should(filenameTermQuery);
            entireQuery.should(fileAttributesPhraseQuery);
            sectionsQueries.should(textTermQuery);
        }

        if (returnSections) {
            var nestedQuery = QueryBuilders.nested()
                    .scoreMode(ChildScoreMode.Avg)
                    .queryName("sections")
                    .query(sectionsQueries.build()._toQuery())
                    .path("sections")
                    .innerHits(i -> i.size(100))
                    .build()
                    ._toQuery();
            entireQuery.should(nestedQuery);
        }

        var filterQuery = QueryBuilders.bool();

        if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {

            var dossierTemplateIdQueryBuilder = QueryBuilders.bool();

            for (var dossierTemplateId : dossierTemplateIds) {
                if (StringUtils.isNotEmpty(dossierTemplateId)) {
                    dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match()
                            .field("dossierTemplateId")
                            .query(q -> q.stringValue(dossierTemplateId))
                            .build()
                            ._toQuery());
                }
            }

            filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
        }

        if (dossierIds != null && !dossierIds.isEmpty()) {

            var dossierIdQueryBuilder = QueryBuilders.bool();

            for (var dossierId : dossierIds) {
                if (StringUtils.isNotEmpty(dossierId)) {
                    dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match().field("dossierId").query(q -> q.stringValue(dossierId)).build()._toQuery());
                }
            }

            filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
        }

        if (StringUtils.isNotEmpty(fileId)) {
            filterQuery.must(QueryBuilders.match().field("fileId").query(q -> q.stringValue(fileId)).build()._toQuery());
        }

        if (StringUtils.isNotEmpty(assignee)) {
            filterQuery.must(QueryBuilders.match().field("assignee").query(q -> q.stringValue(assignee)).build()._toQuery());
        }

        if (includeArchivedDossiers) {
            filterQuery.must(QueryBuilders.terms()
                    .field("dossierArchived")
                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))
                    .build()
                    ._toQuery());
        } else {
            filterQuery.must(QueryBuilders.terms().field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build()))).build()._toQuery());
        }

        if (includeDeletedDossiers) {
            filterQuery.must(QueryBuilders.terms()
                    .field("dossierDeleted")
                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))
                    .build()
                    ._toQuery());
        } else {
            filterQuery.must(QueryBuilders.terms().field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build()))).build()._toQuery());
        }

        if (StringUtils.isNotEmpty(workflowStatus)) {
            filterQuery.must(QueryBuilders.match().field("workflowStatus").query(q -> q.stringValue(workflowStatus)).build()._toQuery());
        }

        if (fileAttributes != null && !fileAttributes.isEmpty()) {
            var fileAttributesQueryBuilder = QueryBuilders.bool();

            for (var fileAttributeKey : fileAttributes.keySet()) {
                if (StringUtils.isNotEmpty(fileAttributeKey)) {
                    fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
                            .must(QueryBuilders.match().field("fileAttributes.name").query(q -> q.stringValue(fileAttributeKey)).build()._toQuery())
                            .must(QueryBuilders.match().field("fileAttributes.value").query(q -> q.stringValue(fileAttributes.get(fileAttributeKey))).build()._toQuery())
                            .build()
                            ._toQuery()));
                }
            }

            filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
        }

        return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
    }


    private SearchResult convert(SearchResponse response, Query query) {

        List<Hit> hits = response.hits().hits();

        return SearchResult.builder()
                .matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
                .maxScore(response.maxScore() == null ? 0 : response.maxScore().floatValue())
                .total(response.hits().total().value())
                .build();
    }


    private MatchedDocument convertSearchHit(Hit hit, Query query) {

        List<String> m = hit.matchedQueries();

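        // matchedQueries() returns the queryNames assigned in convertQuery. The "filename."
        // and "fileAttributes." prefixes are stripped so that, e.g., "contract" and
        // "filename.contract" both collapse to the term "contract"; whatever remains of the
        // musts/shoulds afterwards ends up in unmatchedTerms.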
        Set<String> matchesTerms = m.stream()
                .map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
                .map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
                .collect(Collectors.toSet());

        Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());

        IndexDocument indexDocument = (IndexDocument) hit.source();

        MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
                .score(hit.score().floatValue())
                .dossierId(indexDocument.getDossierId())
                .dossierTemplateId(indexDocument.getDossierTemplateId())
                .fileId(indexDocument.getFileId())
                .assignee(indexDocument.getAssignee())
                .fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
                .workflowStatus(indexDocument.getWorkflowStatus())
                .fileName(indexDocument.getFilename())
                .dossierDeleted(indexDocument.isDossierDeleted())
                .dossierArchived(indexDocument.isDossierArchived())
                .highlights(hit.highlight())
                .matchedTerms(matchesTerms)
                .unmatchedTerms(unmatchedTerms);

        if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
            InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
            matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
                    .containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
        }

        return matchedDocumentBuilder.build();
    }

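    // The nested fileAttributes come back from the index as a list of {name, value} maps;
    // e.g. [{"name": "author", "value": "Smith"}] becomes {"author": "Smith"}.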
    private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {

        Map<String, String> fileAttributes = new HashMap<>();

        if (fileAttributesSourceMap != null) {
            List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
            list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
        }

        return fileAttributes;
    }


    private MatchedSection convertInnerHit(Hit<JsonData> hit) {

        JsonObject indexSection = hit.source().toJson().asJsonObject();

        var jsonArray = indexSection.getJsonArray("pages");
        var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());

        return MatchedSection.builder()
                .headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
                .sectionNumber(indexSection.getInt("sectionNumber"))
                .pages(pages)
                .matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
                .build();
    }

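    // Paging defaults: pageSize <= 0 falls back to 10 and page < 0 to 0. The offset sent
    // with the request is page * pageSize, e.g. page 2 with pageSize 25 reads hits 50-74.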
    private int getPageSizeOrDefault(int pageSize) {

        if (pageSize <= 0) {
            return 10;
        }
        return pageSize;
    }


    private int getPageOrDefault(int page) {

        if (page < 0) {
            return 0;
        }
        return page;
    }

}
@ -1,11 +1,5 @@
{
  "properties": {
    "dossierId": {
      "type": "keyword"
    },
    "dossierTemplateId": {
      "type": "keyword"
    },
    "fileId": {
      "type": "keyword"
    },
@ -14,21 +8,9 @@
      "term_vector": "with_positions_offsets",
      "analyzer": "filename_analyzer"
    },
    "date": {
      "type": "date"
    },
    "assignee": {
    "sectionNumber": {
      "type": "keyword"
    },
    "workflowStatus": {
      "type": "keyword"
    },
    "dossierArchived": {
      "type": "boolean"
    },
    "dossierDeleted": {
      "type": "boolean"
    },
    "fileAttributes": {
      "type": "nested",
      "include_in_parent": true,
@ -42,24 +24,21 @@
        }
      }
    },
    "sections": {
      "type": "nested",
      "include_in_parent": true,
      "properties": {
        "headline": {
          "type": "text"
        },
        "sectionNumber": {
          "type": "keyword"
        },
        "pages": {
          "type": "keyword"
        },
        "text": {
          "type": "text",
          "term_vector": "with_positions_offsets"
        }
      }
        "headline": {
          "type": "text"
        },
        "pages": {
          "type": "keyword"
        },
        "text": {
          "type": "text",
          "term_vector": "with_positions_offsets"
        },
        "wordEmbeddingsVector": {
          "type": "dense_vector",
          "dims": 384,
          "index": true,
          "similarity": "cosine"
        }
      }
    }
@ -34,7 +34,7 @@ import com.iqser.red.storage.commons.service.StorageService;
@ComponentScan
@ExtendWith(SpringExtension.class)
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractElasticsearchIntegrationTest.WAIT_FOR_WRITE_REQUESTS, AbstractElasticsearchIntegrationTest.SEARCH_BACKEND})
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractElasticsearchIntegrationTest.SEARCH_BACKEND})
@ContextConfiguration(initializers = {AbstractElasticsearchIntegrationTest.Initializer.class})
@EnableFeignClients(basePackageClasses = AbstractElasticsearchIntegrationTest.TestConfiguration.class)
@DirtiesContext
File diff suppressed because it is too large
@ -1,80 +0,0 @@
package com.iqser.red.service.search.v1.server.service;

import static org.mockito.Mockito.when;

import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;

import com.iqser.red.service.persistence.service.v1.api.shared.model.index.IndexInformation;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;

import lombok.SneakyThrows;

public class IndexTest extends AbstractElasticsearchIntegrationTest {

    @Autowired
    private IndexInformationService indexInformationService;

    @MockBean
    private IndexInformationClient indexInformationClient;

    @MockBean
    private FileStatusClient fileStatusClient;

    @MockBean
    private IndexingMessageReceiver indexingMessageReceiver;

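    // These tests pin down the migration trigger: hasIndexChanged() compares the hash of
    // the current index configuration against the one stored in IndexInformation, and a
    // missing IndexInformation record counts as "changed" so a fresh index is built.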
    @Test
    @SneakyThrows
    public void testGenerateHash() {
        // Act
        String hash = indexInformationService.generateIndexConfigurationHash();

        // Assert
        System.out.println(hash);
        Assertions.assertNotNull(hash);
    }


    @Test
    @SneakyThrows
    public void testHashChanged() {
        // Arrange
        IndexInformation indexInformation = IndexInformation.builder().indexConfigurationHash("Some Hash").build();
        when(indexInformationClient.getIndexInformation()).thenReturn(indexInformation);

        // Act and Assert
        Assertions.assertTrue(indexInformationService.hasIndexChanged());
    }


    @Test
    @SneakyThrows
    public void testHashChangedNot() {
        // Arrange
        String hash = indexInformationService.generateIndexConfigurationHash();
        IndexInformation indexInformation = IndexInformation.builder().indexConfigurationHash(hash).build();
        when(indexInformationClient.getIndexInformation()).thenReturn(indexInformation);

        // Act and Assert
        Assertions.assertFalse(indexInformationService.hasIndexChanged());
    }


    @Test
    @SneakyThrows
    public void testHashDoesNotExist() {
        // Arrange
        when(indexInformationClient.getIndexInformation()).thenReturn(null);

        // Act and Assert
        Assertions.assertTrue(indexInformationService.hasIndexChanged());
    }

}
@ -1,91 +0,0 @@
package com.iqser.red.service.search.v1.server.service;

import static org.mockito.Mockito.when;

import org.junit.jupiter.api.Test;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.core.io.ClassPathResource;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.model.SearchRequest;
import com.iqser.red.service.search.v1.server.client.DossierClient;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
import com.iqser.red.service.search.v1.server.controller.SearchController;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
import com.iqser.red.service.search.v1.server.service.utils.MetricValidationUtils;

import io.micrometer.prometheus.PrometheusMeterRegistry;
import lombok.SneakyThrows;

public class MetricsIntegrationTest extends AbstractElasticsearchIntegrationTest {

    @Autowired
    private ObjectMapper objectMapper;
    @Autowired
    private IndexingMessageReceiver indexingMessageReceiver;
    @Autowired
    private PrometheusMeterRegistry prometheusMeterRegistry;
    @Autowired
    private SearchController searchController;
    @MockBean
    private FileStatusClient fileStatusClient;
    @MockBean
    private DossierClient dossierClient;
    @MockBean
    private FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
    @MockBean
    private IndexInformationClient indexInformationClient;
    @MockBean
    private RabbitTemplate rabbitTemplate;

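    // End-to-end metrics check: index a file, update it and run one search, then assert
    // that each @Timed metric (redactmanager_indexDocument, redactmanager_updateDocument,
    // redactmanager_getTextSearchService, redactmanager_search) was recorded exactly once.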
    @Test
    @SneakyThrows
    public void testMetrics() {

        var dossier = new Dossier();
        dossier.setId("1");
        when(dossierClient.getDossierById("1", true, true)).thenReturn(dossier);

        var file = new FileModel();
        file.setDossierId("1");
        file.setId("1");
        file.setWorkflowStatus(WorkflowStatus.NEW);
        when(fileStatusClient.getFileStatus("1", "1")).thenReturn(file);

        ClassPathResource textResource = new ClassPathResource("files/Text2.json");
        Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
        storageService.storeJSONObject(TenantContext.getTenantId(), TextStorageService.StorageIdUtils.getStorageId("1", "1", FileType.SIMPLIFIED_TEXT), text);

        IndexMessage indexRequest = new IndexMessage();
        indexRequest.setDossierId("1");
        indexRequest.setFileId("1");
        indexRequest.setDossierTemplateId("1");
        indexRequest.setMessageType(IndexMessageType.INSERT);
        indexingMessageReceiver.receiveIndexingRequest(indexRequest);

        indexRequest.setMessageType(IndexMessageType.UPDATE);
        indexingMessageReceiver.receiveIndexingRequest(indexRequest);

        searchController.getDossierStatus(SearchRequest.builder().queryString("test").build());

        MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_indexDocument", 1, null);
        MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_updateDocument", 1, null);
        MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_getTextSearchService", 1, null);
        MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_search", 1, null);
    }

}
File diff suppressed because it is too large
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,373 @@
{
  "sectionTexts": [
    {
      "text": "European Commission",
      "sectionNumber": 1
    },
    {
      "text": "Addendum VOLUME 2 Abamectin Rapporteur Member State: The Netherlands April 2015 February 2016 Draft Assessment Report and Proposed decision of the Netherlands prepared in the context of the possible extension of the approval conditions of abamectin under Regulation (EC) 1107/2009 Abamectin – Volume 2 2",
      "sectionNumber": 2
    },
    {
      "text": "Date Version history April 2015 Initial version February 2015 Revised addendum to DAR in light of comments and additional information received",
      "sectionNumber": 3
    },
    {
      "text": "Version history page Abamectin – Volume 2 3",
      "sectionNumber": 4
    },
    {
      "text": "TABLE OF CONTENTS – VOLUME 1 A. List of the tests, studies and information submitted ........................................................................ 4 A.1 Identity ......................................................................................................................................... 4 A.2 Physical and chemical properties ............................................................................................... 4 A.3 Data on application and efficacy ................................................................................................. 6 A.4 Further information .................................................................................................................... 10 A.5 Methods of analysis .................................................................................................................. 10 A.6 Toxicology and metabolism data .............................................................................................. 11 A.7 Residue data ............................................................................................................................. 13 A.8 Environmental fate and behaviour ............................................................................................ 15 A.9 Ecotoxicology data .................................................................................................................... 16 Abamectin – Volume 2 4",
      "sectionNumber": 5
    },
    {
      "text": "A. List of the tests, studies and information submitted A.1 Identity Refer to Volume 4.",
      "sectionNumber": 6
    },
    {
      "text": "Data point Author(s) Year Title Company Report No. Source (where different from company) GLP or GEP status Published or not Vertebrate study Y/N Data protection claimed Y/N Owner",
      "sectionNumber": 7
    },
    {
      "text": "KIIIA1 2.1 / 01 & KIIIA1 2.4.1 / 01 & KIIIA1 2.4.2 / 01 & KIIIA1 2.5.2 / 01 & KIIIA1 2.5.3 / 01 & KIIIA1 2.6.1 / 01 & KIIIA1 2.8.6.1 / 01 Kalt R. 2009 A12115I - Physical properties of batch SMU9EP004 Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 120109 GLP, not published Syngenta File No A12115I_10005 N Y SYN",
      "sectionNumber": 8
    },
    {
      "text": "KIIIA1 2.2.1 / 01 & KIIIA1 2.2.2 / 01 & KIIIA1 2.3.1 / 01 & KIIIA1 2.3.3 / 01 Jackson W. 2009 A12115I - Physical and chemical properties Syngenta Syngenta Technology & Projects, Huddersfield, United Kingdom, HT09/241 GLP, not published Syngenta File No A12115I_10013 N Y SYN",
      "sectionNumber": 9
    },
    {
      "text": "KIIIA1 2.7.1 / 01 Kalt R. 2009a A12115I - Storage stability and shelf life statement (2 weeks 54°C) in packaging made of HDPE Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, Not GLP, not published Syngenta File No A12115I_10009 N N SYN",
      "sectionNumber": 10
    },
    {
      "text": "KIIIA1 2.7.1 / 02 Kalt R. 2009b A12115I - Storage stability and shelf life statement (2 weeks 54°C) in packaging made of PET Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, Not GLP, not published Syngenta File No A12115I_10010 N N SYN",
      "sectionNumber": 11
    },
    {
      "text": "KIIIA1 Kalt R. 2009c A12115I - Technical properties of batch N N SYN",
      "sectionNumber": 12
    },
    {
      "text": "2.7.4 / 01 & KIIIA1 2.8.2 / 01 & KIIIA1 2.8.3.1 / 01 & KIIIA1 2.8.3.2 / 01 & KIIIA1 2.8.5.2 / 01 & KIIIA1 2.8.8.2 / 01 SMU9EP004 Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 120251 Not GLP, not published Syngenta File No A12115I_10004",
      "sectionNumber": 14
    },
    {
      "text": "KIIIA1 2.7.5 / 01 & KIIIA1 4.1.3 / 01 Kalt R. 2011 A12115I - Storage stability and shelf life statement (2 years 25°C) in packaging made of HDPE Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 10478696 Not GLP, not published Syngenta File No A12115I_10039 N N SYN",
      "sectionNumber": 15
    },
    {
      "text": "KIIIA1 2.7.5 / 02 & KIIIA1 4.1.3 / 02 Kalt R. 2011a A12115I - Storage stability and shelf life statement (2 years 25°C) in packaging made of PET Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 10478743 Not GLP, not published Syngenta File No A12115I_10040 N N SYN",
      "sectionNumber": 16
    },
    {
      "text": "KIIIA1 3.3.1 / 01 Briswalter C. 2011 A12115I GAP Syngenta Crop Protection AG, Basel, Switzerland, Not GLP, not published Syngenta File No A12115I_10042 N N SYN",
      "sectionNumber": 17
    },
    {
      "text": "A.2 Physical and chemical properties Abamectin – Volume 2 5 Abamectin – Volume 2 6",
      "sectionNumber": 19
    },
    {
      "text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner SYN = Syngenta",
      "sectionNumber": 20
    },
    {
      "text": "KIIIA1 6/01 Ivacic D. 2012 A12115I (Tervigo) for nematodes on protected vegetables Syngenta - No Unublished Regulatory document No. A12115I_10165 Y SYN",
      "sectionNumber": 21
    },
    {
      "text": "",
      "sectionNumber": 23
    },
    {
      "text": "Abamectin – Volume 2 KIIIA1 6.1.2/001 & KIIIA1 6.1.2/006 & KIIIA1 6.1.3/001 Rafael Munoz 2009 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on Y SYN",
      "sectionNumber": 24
    },
    {
      "text": "Abamectin – Volume 2 KIIIA1 6.1.2/001 & KIIIA1 6.1.2/006 & KIIIA1 6.1.3/001 vegetables for the control of nematodes Syngenta CP Spain Syngenta Agro SA, ESSEZN2172009 GEP Unpublished",
      "sectionNumber": 25
    },
    {
      "text": "KIIIA1 6.1.2/002 & KIIIA1 6.1.3/002 Pedro Vega 2009 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of nematodes Syngenta CP Spain Syngenta Agro SA, ESSEZN3172009 GEP Unpublished Y SYN",
      "sectionNumber": 26
    },
    {
      "text": "KIIIA1 6.1.2/003 & KIIIA1 6.1.2/015 & KIIIA1 6.1.3/003 & KIIIA1 6.1.3/033 Sotiris Pantazis 2009 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of nematodes Syngenta CP Greece Anadiag Hellas, GR45ZN1232009 GEP Unpublished Y SYN",
      "sectionNumber": 27
    },
    {
      "text": "KIIIA1 6.1.2/004 & KIIIA1 6.1.3/004 Antonio Aversa 2009 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of nematodes Syngenta CP Italy Syngenta Crop Protection S.r.l., ITSOZN0522009 GEP Unpublished Y SYN",
      "sectionNumber": 28
    },
    {
      "text": "KIIIA1 6.1.2/005 & KIIIA1 6.1.3/005 & KIIIA1 6.1.3/011 Pedro Pablo Japón Fu 2009 Terra Nova - registration trials on fruiting vegetables Syngenta CP Spain Phytest Hispania S.l. ESPHZN0202009 GEP Unpublished Y SYN",
      "sectionNumber": 29
    },
    {
      "text": "KIIIA1 6.1.2/007 & KIIIA1 6.1.3/017 Ana Orrico Marín 2009 Terra Nova - registration trials on fruiting vegetables Syngenta CP Spain Sicop Spain, ESSCZN0422009 GEP Unpublished Y SYN",
      "sectionNumber": 30
    },
    {
      "text": "KIIIA1 6.1.2/008 & KIIIA1 6.1.3/019 Rafael Munoz 2009a 7 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of nematodes Syngenta CP Spain Syngenta Agro SA, ESSEZN2142009 GEP Unpublished Y SYN",
      "sectionNumber": 31
    },
    {
      "text": "nematodes Syngenta CP Spain Syngenta Agro SA, ESSEZN2162009 GEP Unpublished",
      "sectionNumber": 33
    },
    {
      "text": "KIIIA1 6.1.3/014 & KIIIA1 6.1.3/027 Pedro Pablo Japón Fu 2011 Tervigo - registration trials on eggplant Syngenta CP Spain Phytest Hispania S.l. ESPHZN0012011 GEP Unpublished Y SYN",
      "sectionNumber": 34
    },
    {
      "text": "KIIIA1 6.1.3/015 Michele Derrico 2011 Tervigo - registration trials on eggplant Syngenta CP Italy Syngenta Crop Protection S.r.l., ITFGZN3302011 GEP Unpublished Y SYN",
      "sectionNumber": 35
    },
    {
      "text": "KIIIA1 6.1.3/016 Asero Giuseppe 2011 Tervigo - registration trials on eggplant Syngenta CP Italy Syngenta Crop Protection S.r.l., ITSOZN1412011 GEP Unpublished Y SYN",
      "sectionNumber": 36
    },
    {
      "text": "KIIIA1 6.1.3/018 Asero Giuseppe 2011a Tervigo - registration trials on peppers Syngenta CP Italy Syngenta Crop Protection S.r.l., ITSOZN2702011 GEP Unpublished Y SYN",
      "sectionNumber": 37
    },
    {
      "text": "KIIIA1 6.1.3/024 Rafael Munoz 2010 Terranova - registration trials on fruiting vegetables Syngenta CP Spain Syngenta Agro SA, ESSEZN2252010 GEP Unpublished Y SYN",
      "sectionNumber": 38
    },
    {
      "text": "KIIIA1 6.1.3/025 Adriano Giansante 2010 Terranova - registration trials on fruiting vegetables Syngenta CP Italy Syngenta Crop Protection S.r.l., ITCEZN2212010 GEP Unpublished Y SYN",
      "sectionNumber": 39
    },
    {
      "text": "KIIIA1 6.1.3/026 Rosario D'Asero 2010 8 Terranova - registration trials on fruiting vegetables Syngenta CP Italy Syngenta Crop Protection S.r.l., ITSOZN2222010 GEP Unpublished Y SYN",
      "sectionNumber": 40
    },
    {
      "text": "A.3 Data on application and efficacy Abamectin – Volume 2 KIIIA1 6.1.3/012 Rafael Munoz 2009d Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of Y SYN Abamectin – Volume 2 9 Abamectin – Volume 2 10",
      "sectionNumber": 42
    },
    {
      "text": "Data point Author(s) Year Title Company Report No. Source (where different from company) GLP or GEP status Published or not Vertebrate study Y/N Data protection claimed Y/N Owner",
      "sectionNumber": 43
    },
    {
      "text": "IIIA 4.2.2/01 Kalt, R 2009d A12115I - The effectiveness of the spray tank cleaning procedure Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 120365 Not GLP, not published Syngenta File No A12115I_10012 N N SYN",
      "sectionNumber": 44
    },
    {
      "text": "A.4 Further information",
      "sectionNumber": 46
    },
    {
      "text": "Data point Author(s) Year Title Company Report No. Source (where different from company) GLP or GEP status Published or not Vertebrate study Y/N Data protection claimed Y/N Owner",
      "sectionNumber": 47
    },
    {
      "text": "KIIIA1 5.2.1 / 01 Dos Santos Alves A. 2009 Determination of MK936 in A12115I Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, SF-328/1 Not GLP, not published Syngenta File No A12115I_10017 N N SYN",
      "sectionNumber": 48
    },
    {
      "text": "KIIIA1 5.2.1 / 02 Heintz K. 2009 A12115I - Validation of analytical method SF-328/1 Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 120106 GLP, not published Syngenta File No A12115I_10018 N Y SYN",
      "sectionNumber": 49
    },
    {
      "text": "A.5 Methods of analysis Abamectin – Volume 2 11",
      "sectionNumber": 51
    },
    {
      "text": "Data point Author(s) Year Title Company Report No. Source (where different from company) GLP or GEP status Published or not Vertebrate study Y/N Data protection claimed Y/N Justification if data protection is claimed Owner",
      "sectionNumber": 52
    },
    {
      "text": "KIIIA1 7.1.1 / 01 Arcelin G 2009a Abamectin SC (A12115I) - Acute Oral Toxicity Study in the Rat (Up and Down Procedure) Syngenta - Jealott’s Hill, Bracknell, United Kingdom RCC Ltd., Füllinsdorf, Switzerland, C31684 GLP, not published Syngenta File No A12115I_10020 Y Y Y SYN",
      "sectionNumber": 53
    },
    {
      "text": "KIIIA1 7.1.2 / 01 Arcelin G 2010 Abamectin SC(A12115I) - Acute Toxicity Study in Rats Syngenta - Jealott’s Hill, Bracknell, United Kingdom Harlan Laboratories Ltd., 4414 Fullinsdorf, Switzerland, C79856 GLP, not published Syngenta File No A12115I_10021 Y Y Y SYN",
      "sectionNumber": 54
    },
    {
      "text": "KIIIA1 7.1.3 / 01 Shaw D 2009 Abamectin SC (A12115I) - Acute 4 Hour (Nose Only) Inhalation Study In The Rat Syngenta - Jealott’s Hill, Bracknell, United Kingdom Covance Laboratories, Harrogate, United Kingdom, 8202-064, T000153-09 GLP, not published Syngenta File No A12115I_10011 Y Y Y SYN",
      "sectionNumber": 55
    },
    {
      "text": "KIIIA1 7.1.4 / 01 Arcelin G. 2009b Abamectin SC (A121151) - Primary Skin Irritation Study in Rabbits (4 Hour Semi-Occlusive Application) Syngenta RCC Ltd., Füllinsdorf, Switzerland, C46613 GLP, not published Syngenta File No A12115I_10015 Y Y Y SYN",
      "sectionNumber": 56
    },
    {
      "text": "KIIIA1 7.1.5 / 01 Arcelin G. 2009c Abamectin SC (A121151) - Primary Eye Irritation Study in Rabbits Y Y Y SYN",
      "sectionNumber": 57
    },
    {
      "text": "Syngenta RCC Ltd., Füllinsdorf, Switzerland, C46624 GLP, not published Syngenta File No A12115I_10016",
      "sectionNumber": 59
    },
    {
      "text": "KIIIA1 7.1.6 / 01 Arcelin G 2009d Abamectin SC (A12115I) - Contact Hypersensitivty in Albino Guinea Pigs, Buehler Test (9-induction) Syngenta - Jealott’s Hill, Bracknell, United Kingdom RCC Ltd., Füllinsdorf, Switzerland, C46635 GLP, not published Syngenta File No A12115I_10019 Y Y Y SYN",
      "sectionNumber": 60
    },
    {
      "text": "A.6 Toxicology and metabolism data Abamectin – Volume 2 12 Abamectin – Volume 2 13",
      "sectionNumber": 62
    },
    {
      "text": "OECD data point number / reference number Author(s) Year Title Source (where different from company) Company, Report No GLP or GEP status (where relevant), Published or not Data Protection Claimed Y/N Owner",
      "sectionNumber": 63
    },
    {
      "text": "KIIIA1 8.1.1 / 01 Kwiatkowski A., Hill S. 2007 Abamectin - Storage Stability in Crops Stored Deep Frozen for up to Two Years - Final Report Syngenta Crop Protection AG, Basel, Switzerland Syngenta - Jealott’s Hill International, Bracknell, Berkshire, United Kingdom, T022438-04-REG 05-S504 GLP, not published Syngenta File No MK936/1798 Y SYN",
      "sectionNumber": 64
    },
    {
      "text": "KIIIA1 8.3.1 / 01 Schulz H. 2010a Abamectin - Residue Study on Protected Tomatoes in Germany and the United Kingdom in 2009 Syngenta SGS INSTITUT FRESENIUS GmbH, Im Maisel 14, D-65232 Taunusstein, Germany, T001014- 09-REG GLP, not published Syngenta File No A12115I_10028 Y SYN",
      "sectionNumber": 65
    },
    {
      "text": "KIIIA1 8.3.2 / 01 Schulz H. 2010b Abamectin - Residue Study on Protected Pepper in Germany and the United Kingdom in 2009 Syngenta - Jealott’s Hill, Bracknell, United Kingdom SGS INSTITUT FRESENIUS GmbH, Im Maisel 14, D-65232 Taunusstein, Germany, T001977- 09-REG GLP, not published Syngenta File No A12115I_10027 Y SYN",
      "sectionNumber": 66
    },
    {
      "text": "KIIIA1 8.3.4 / 01 Schulz H 2010c Abamectin - Residue Study on Protected Cucumber in Germany and the United Kingdom in 2009 Syngenta - Jealott’s Hill, Bracknell, United Kingdom SGS INSTITUT FRESENIUS GmbH, Im Maisel 14, D-65232 Taunusstein, Germany, T001016- 09-REG GLP, not published Syngenta File No A12115I_10026 Y SYN",
      "sectionNumber": 67
    },
    {
      "text": "OECD data point number / reference number Author(s) Year Title Source (where different from company) Company, Report No GLP or GEP status (where relevant), Published or not Data Protection Claimed Y/N Owner",
      "sectionNumber": 69
    },
    {
      "text": "KIIIA1 8.3.5 / 01 Schulz H. 2012a Abamectin - Residue Study on Protected Melon in Southern France and Spain in 2011 Syngenta SGS Institut Fresenius GmbH, Taunusstein, Germany, TK0055923-REG GLP, not published Syngenta File No A12115I_10064 Y SYN",
      "sectionNumber": 70
    },
    {
      "text": "KIIIA1 8.3.6 / 01 Schulz H. 2012b Abamectin - Residue Study on Protected Beans with Pods (Fresh) in Spain in 2011 Syngenta SGS Institut Fresenius GmbH, Taunusstein, Germany, TK0055927-REG GLP, not published Syngenta File No A12115I_10063 Y SYN",
      "sectionNumber": 71
    },
    {
      "text": "A.7 Residue data Abamectin – Volume 2 14 Abamectin – Volume 2 15",
      "sectionNumber": 73
    },
    {
      "text": "Annex point(s) Author(s) Year Title Source Report ID GLP or GEP status Published or not Data Protection Claimed Y/N Owner",
      "sectionNumber": 74
    },
    {
      "text": "KIIIA1 9.6.1 / 01 Mason D, Li Z 2010 Abamectin - A Leaching Assessment for Parent and Soil Metabolites NOA448111, NOA448112, NOA457464 and NOA457465 Using the FOCUS Groundwater Scenarios Following Application to Grass Syngenta - Jealott’s Hill, Bracknell, United Kingdom Syngenta - Jealott’s Hill, Bracknell, United Kingdom, RAJ0775B Not GLP, not published Syngenta File No NOA422601_10007 This is CONFIDENTIAL INFORMATION* N SYN",
|
||||
"sectionNumber": 75
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 9.6.1 / 02 Mason D 2010 Abamectin - Predicted Environmental Concentrations in Groundwater for Abamectin using the FOCUS Groundwater Scenarios Following Application to Grass Syngenta - Jealott’s Hill, Bracknell, United Kingdom Syngenta - Jealott’s Hill, Bracknell, United Kingdom, TK0006924_1 Not GLP, not published Syngenta File No NOA422601_10016 This is CONFIDENTIAL INFORMATION* N SYN",
|
||||
"sectionNumber": 76
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 9.6.1 / 03 Wallace D. 2012 Abamectin - Predicted Environmental Concentration of metabolite U8 in ground water Syngenta European Product Registration, Basel, Switzerland, Not GLP, not published Syngenta File No NOA422601_10021 This is CONFIDENTIAL INFORMATION* N SYN",
|
||||
"sectionNumber": 77
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 9.6.1 / 04 Carnall J. 2014 Abamectin - A Leaching Assessment for Parent and Soil Metabolites NOA448111, NOA448112, NOA457464 and NOA457465 Using the FOCUS Groundwater Scenarios Following Drip Irrigation Application to Beans and Tomatoes in the EU DRAFT Report Y SYN",
|
||||
"sectionNumber": 78
|
||||
},
|
||||
{
|
||||
"text": "KIIIA 9.6.2/1 Patterson D. 2014 Abamectin A Fate assessment for Parent using the FOCUS Surface Water Scenarios at Step 3 Following Application to Walk-in Tunnel Cultivated Fruiting Vegetables. Y SYN",
|
||||
"sectionNumber": 79
|
||||
},
|
||||
{
|
||||
"text": "A.8 Environmental fate and behaviour * Syngenta requests data confidentiality for these data. Disclosure of the information might undermine Syngenta commercial interests by providing access to Syngenta specific know-how used to develop unique positions and approaches to risk assessments Abamectin – Volume 2 16",
|
||||
"sectionNumber": 81
|
||||
},
|
||||
{
|
||||
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
|
||||
"sectionNumber": 82
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.1.6 / 01 Hubbard P., Beavers J. 2011 Abamectin SC (A12115I) - An acute oral toxicity study with the northern bobwhite using a sequential testing procedure Syngenta Wildlife International Ltd., Easton, Maryland 21601, USA, 528-362 GLP, not published Syngenta File No A12115I_10034 Y SYN",
|
||||
"sectionNumber": 83
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.2.2.1 / 01 Liedtke A. 2011 Abamectin SC (A12115I) - Acute toxicity to rainbow trout (Oncorhynchus mykiss) in a 96-hour test Syngenta Harlan Laboratories Ltd., Itingen, Switzerland, D36411 GLP, not published Syngenta File No A12115I_10041 Y SYN",
|
||||
"sectionNumber": 84
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.2.2.2 / 01 Hoger S 2010 Abamectin SC (A12115I) - Acute toxicity to Daphnia magna in a 48-hour immobilization test Syngenta - Jealott’s Hill, Bracknell, United Kingdom Harlan Laboratories Ltd., Zelgliweg 1, 4452 Itingen, Switzerland, C86663 GLP, not published Syngenta File No A12115I_10025 Y SYN",
|
||||
"sectionNumber": 85
|
||||
},
|
||||
{
|
||||
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
|
||||
"sectionNumber": 87
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.2.2.3 / 01 Liedtke A. 2011a Abamectin SC (A12115I) - Toxicity to Pseudokirchneriella subcapitata in a 96-hour algal growth inhibition test Syngenta Harlan Laboratories Ltd., Itingen, Switzerland, D36398 GLP, not published Syngenta File No A12115I_10038 Y SYN",
|
||||
"sectionNumber": 88
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.2.3 / 01 Rufli H. 1999 Assessment of the potential biological effects of Abamectin (MK936, 018 EC) (A-8612 A) exposures on aquatic ecosystems as measured in an outdoor microcosm tank system Novartis Crop Protection AG, Basel, Switzerland Novartis Crop Protection AG, Basel, Switzerland, 982570 GLP, not published Syngenta File No MK936/0638 Y SYN",
|
||||
"sectionNumber": 89
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.2.3 / 02 Knauer K. 2002 Assessment of the Effects of Abamectin 018 EC (A8612A) in Outdoor Microcosms Syngenta Crop Protection AG, Basel, Switzerland Syngenta Crop Protection AG, Basel, Switzerland, 2002590 GLP, not published Syngenta File No MK936/0817 Y SYN",
|
||||
"sectionNumber": 90
|
||||
},
|
||||
{
|
||||
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
|
||||
"sectionNumber": 92
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.3.2.1 / 01 Arcelin G. 2009 Abamectin SC (A12115I) - Acute Oral Toxicity Study in the Rat (Up and Down Procedure) Syngenta - Jealott’s Hill, Bracknell, United Kingdom RCC Ltd., Füllinsdorf, Switzerland, C31684 GLP, not published Syngenta File No A12115I_10020 Y SYN",
|
||||
"sectionNumber": 93
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.4.2.1 / 01 Kling A. 2011 Abamectin SC (A12115I) - Acute oral and contact toxicity to the honeybee Apis mellifera L. in the laboratory Syngenta Eurofins Agroscience Services GmbH, NiefernÖschel., Germany, S11- 02876 GLP, not published Syngenta File No A12115I_10035 Y SYN",
|
||||
"sectionNumber": 94
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.5.2 / 01 Fussell S. 2004 MK936 (abamectin): a rateresponse extended laboratory test to determine the effects of an 18 g/L EC formulation (A8612AB) on the parasitic wasp Aphidius rhopalosiphi Syngenta Crop Protection AG, Basel, Switzerland Mambo-Tox. Ltd., Southampton, United Kingdom, SYN-04-1 2032631 GLP, not published Syngenta File No MK936/1105 Y SYN",
|
||||
"sectionNumber": 95
|
||||
},
|
||||
{
|
||||
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
|
||||
"sectionNumber": 97
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.5.2 / 02 Waterman L. 2004 MK936 (abamectin): A rateresponse extended laboratory test to determine the effects of an 18 g/L EC formulation (A8612AB) on the predatory mite Typhlodromus pyri Syngenta Crop Protection AG, Basel, Switzerland Mambo-Tox. Ltd., Southampton, United Kingdom, SYN-04-2 GLP, not published Syngenta File No MK936/1106 Y SYN",
|
||||
"sectionNumber": 98
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.5.2 / 03 Reber B. 1999 Acute toxicity of MK 936 EC 018 (A-8612 A) to the predatory ground beetle Poecilus cupreus L. (Coleoptera: carabidae) Novartis Crop Protection AG, Basel, Switzerland Novartis Crop Protection AG, Basel, Switzerland, 982611 GLP, not published Syngenta File No MK936/0626 Y SYN",
|
||||
"sectionNumber": 99
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.5.2 / 04 Kuhner C. 1998 Vertimec EC 0.18 (A-8612 A): combination of a semifield and an extented laboratory study (field aged residue) to evaluate the effects on the ground beetle, poecilus cupreus L. (Coleoptera, Carabidae) Novartis Crop Protection AG, Basel, Switzerland GAB Biotechnologie GmbH, Niefern, Germany, 98247/01-NEPc GLP, not published Syngenta File No MK936/0540 Y SYN",
|
||||
"sectionNumber": 100
|
||||
},
|
||||
{
|
||||
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
|
||||
"sectionNumber": 102
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 10.6.2 / 01 Friedrich S. 2011 Abamectin SC (A12115I) - Acute toxicity to the earthworm Eisenia fetida Syngenta BioChem Agrar, Gerichshain, Germany, 11 10 48 099 S GLP, not published Syngenta File No A12115I_10037 Y SYN",
|
||||
"sectionNumber": 103
|
||||
},
|
||||
{
|
||||
"text": "10.6.6 McCormac, A 2014 Abamectin SC (A13796I) – A laboratory test to determine the effects of fresh residues on the springtail Folsomia candida (Collembola, Isotomidae), Syngenta Ltd., Report Number SYN-14-3, GLP, not published Y SYN",
|
||||
"sectionNumber": 104
|
||||
},
|
||||
{
|
||||
"text": "10.6.6 Vinall, S 2014 Abamectin SC (A13796I) – A laboratory test to determine the effects of fresh residues on the predatory mite Hypoaspis aculeifer (Acari, Laelapidae), Syngenta Ltd. Report Number SYN-14-2, GLP, Not published Y SYN",
|
||||
"sectionNumber": 105
|
||||
},
|
||||
{
|
||||
"text": "A.9 Ecotoxicology data Abamectin – Volume 2 17 Abamectin – Volume 2 18 Abamectin – Volume 2 19 Abamectin – Volume 2 20",
|
||||
"sectionNumber": 107
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 Rafael 2009b Abamectin - Fe chelate - Y SYN",
|
||||
"sectionNumber": 108
|
||||
},
|
||||
{
|
||||
"text": "KIIIA1 Pedro Janer 2011 Tervigo - registration trials on Y SYN",
|
||||
"sectionNumber": 109
|
||||
}
|
||||
],
|
||||
"numberOfPages": 20
|
||||
}
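For orientation: the fixture above is a flat list of extracted sections, each pairing the raw text of one heading or table row with its sectionNumber in the source PDF, plus a numberOfPages total. A minimal Jackson sketch of that shape follows; the class names and the top-level array field name ("sections") are assumptions for illustration, since the fragment starts mid-array and the service's actual model classes for this fixture are not shown in this diff.

// Sketch only: assumed names (Section, ExtractionResult, "sections") for
// deserializing a fixture shaped like the JSON above with Jackson.
import java.util.List;

import com.fasterxml.jackson.databind.ObjectMapper;

public class SectionFixtureSketch {

    // One extracted section: raw heading/row text and its position in the source document.
    public static class Section {
        public String text;
        public int sectionNumber;
    }

    // Assumed top-level shape: the section list plus the page count seen in the fixture.
    public static class ExtractionResult {
        public List<Section> sections; // field name "sections" is an assumption
        public int numberOfPages;
    }

    public static void main(String[] args) throws Exception {
        String json = "{\"sections\":[{\"text\":\"A.4 Further information\",\"sectionNumber\":46}],\"numberOfPages\":20}";
        ExtractionResult result = new ObjectMapper().readValue(json, ExtractionResult.class);
        System.out.println(result.sections.get(0).text + " (pages: " + result.numberOfPages + ")");
    }
}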
File diff suppressed because it is too large (5 files)