Compare commits


4 Commits

Author                SHA1        Message                                                     Date
Dominique Eifländer   e66c9df133  Update pom.xml                                              2023-10-26 15:51:46 +02:00
Christoph Schabert    d805f9d730  Update 6 files                                              2023-10-26 15:27:47 +02:00
                                  - /bamboo-specs/src/main/java/buildjob/PlanSpec.java
                                  - /bamboo-specs/src/main/resources/scripts/build-java.sh
                                  - /bamboo-specs/src/main/resources/scripts/sonar-java.sh
                                  - /bamboo-specs/src/test/java/buildjob/PlanSpecTest.java
                                  - /bamboo-specs/pom.xml
                                  - /pom.xml
Christoph Schabert    ea36cbac0a  Add new file                                                2023-10-26 15:04:54 +02:00
Dominique Eifländer   6aed99fce0  hotfix: readded fallback if files are stored uncompressed  2023-10-26 14:47:23 +02:00
87 changed files with 1588 additions and 4402 deletions

.gitignore
@@ -26,12 +26,3 @@
**/.DS_Store
**/classpath-data.json
**/dependencies-and-licenses-overview.txt
gradle.properties
gradlew
gradlew.bat
gradle/
**/.gradle
**/build

@@ -3,21 +3,4 @@ variables:
include:
- project: 'gitlab/gitlab'
ref: 'main'
file: 'ci-templates/gradle_java.yml'
deploy:
stage: deploy
tags:
- dind
script:
- echo "Building with gradle version ${BUILDVERSION}"
- gradle -Pversion=${BUILDVERSION} publish
- gradle bootBuildImage --cleanCache --publishImage -PbuildbootDockerHostNetwork=true -Pversion=${BUILDVERSION}
- echo "BUILDVERSION=$BUILDVERSION" >> version.env
artifacts:
reports:
dotenv: version.env
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH =~ /^release/
- if: $CI_COMMIT_TAG
file: 'ci-templates/maven_java.yml'
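
Note: with the deploy job removed, the effective CI configuration presumably reduces to the template include alone. A minimal sketch of the resulting block (assuming nothing else under the surrounding variables: section changed):

include:
  - project: 'gitlab/gitlab'
    ref: 'main'
    file: 'ci-templates/maven_java.yml'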

@@ -1,7 +0,0 @@
plugins {
`kotlin-dsl`
}
repositories {
gradlePluginPortal()
}

@@ -1,60 +0,0 @@
plugins {
`java-library`
`maven-publish`
pmd
checkstyle
jacoco
}
repositories {
mavenLocal()
mavenCentral()
maven {
url = uri("https://nexus.knecon.com/repository/gindev/");
credentials {
username = providers.gradleProperty("mavenUser").getOrNull();
password = providers.gradleProperty("mavenPassword").getOrNull();
}
}
}
group = "com.iqser.red.service"
java.sourceCompatibility = JavaVersion.VERSION_17
java.targetCompatibility = JavaVersion.VERSION_17
pmd {
isConsoleOutput = true
}
tasks.pmdMain {
pmd.ruleSetFiles = files("${rootDir}/config/pmd/pmd.xml")
}
tasks.pmdTest {
pmd.ruleSetFiles = files("${rootDir}/config/pmd/test_pmd.xml")
}
tasks.named<Test>("test") {
useJUnitPlatform()
reports {
junitXml.outputLocation.set(layout.buildDirectory.dir("reports/junit"))
}
}
tasks.test {
finalizedBy(tasks.jacocoTestReport) // report is always generated after tests run
}
tasks.jacocoTestReport {
dependsOn(tasks.test) // tests are required to run before generating the report
reports {
xml.required.set(true)
csv.required.set(false)
html.outputLocation.set(layout.buildDirectory.dir("jacocoHtml"))
}
}
java {
withJavadocJar()
}

@@ -1,39 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
<module name="Checker">
<property
name="severity"
value="error"/>
<module name="TreeWalker">
<module name="SuppressWarningsHolder"/>
<module name="MissingDeprecated"/>
<module name="MissingOverride"/>
<module name="AnnotationLocation"/>
<module name="JavadocStyle"/>
<module name="NonEmptyAtclauseDescription"/>
<module name="IllegalImport"/>
<module name="RedundantImport"/>
<module name="RedundantModifier"/>
<module name="EmptyBlock"/>
<module name="DefaultComesLast"/>
<module name="EmptyStatement"/>
<module name="EqualsHashCode"/>
<module name="ExplicitInitialization"/>
<module name="IllegalInstantiation"/>
<module name="ModifiedControlVariable"/>
<module name="MultipleVariableDeclarations"/>
<module name="PackageDeclaration"/>
<module name="ParameterAssignment"/>
<module name="SimplifyBooleanExpression"/>
<module name="SimplifyBooleanReturn"/>
<module name="StringLiteralEquality"/>
<module name="OneStatementPerLine"/>
<module name="FinalClass"/>
<module name="ArrayTypeStyle"/>
<module name="UpperEll"/>
<module name="OuterTypeFilename"/>
</module>
<module name="FileTabCharacter"/>
<module name="SuppressWarningsFilter"/>
</module>

@@ -1,20 +0,0 @@
<?xml version="1.0"?>
<ruleset name="Custom ruleset"
xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
<description>
Knecon ruleset checks the code for bad stuff
</description>
<rule ref="category/java/errorprone.xml">
<exclude name="MissingSerialVersionUID"/>
<exclude name="AvoidLiteralsInIfCondition"/>
<exclude name="AvoidDuplicateLiterals"/>
<exclude name="NullAssignment"/>
<exclude name="AssignmentInOperand"/>
<exclude name="BeanMembersShouldSerialize"/>
</rule>
</ruleset>

@@ -1,22 +0,0 @@
<?xml version="1.0"?>
<ruleset name="Custom ruleset"
xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
<description>
Knecon test ruleset checks the code for bad stuff
</description>
<rule ref="category/java/errorprone.xml">
<exclude name="MissingSerialVersionUID"/>
<exclude name="AvoidLiteralsInIfCondition"/>
<exclude name="AvoidDuplicateLiterals"/>
<exclude name="NullAssignment"/>
<exclude name="AssignmentInOperand"/>
<exclude name="TestClassWithoutTestCases"/>
<exclude name="BeanMembersShouldSerialize"/>
</rule>
</ruleset>

@@ -1 +0,0 @@
version = 2.0-SNAPSHOT

pom.xml
@@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>search-service</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<modules>
<module>search-service-v1</module>
<module>search-service-image-v1</module>
</modules>
</project>

@@ -1,15 +0,0 @@
#!/bin/bash
dir=${PWD##*/}
gradle assemble
# Get the current Git branch
branch=$(git rev-parse --abbrev-ref HEAD)
# Get the short commit hash (first 5 characters)
commit_hash=$(git rev-parse --short=5 HEAD)
# Combine branch and commit hash
buildName="${USER}-${branch}-${commit_hash}"
gradle bootBuildImage --cleanCache --publishImage -PbuildbootDockerHostNetwork=true -Pversion=$buildName
echo "nexus.knecon.com:5001/red/${dir}-server-v1:$buildName"

@@ -1,6 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:base"
]
}

@@ -0,0 +1,98 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>com.knecon.fforesight</groupId>
<artifactId>platform-docker-dependency</artifactId>
<version>0.1.0</version>
<relativePath/>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>search-service-image-v1</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>1.0-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>
<service.server>search-service-server-v1</service.server>
<platform.jar>${service.server}.jar</platform.jar>
<docker.skip.push>false</docker.skip.push>
<docker.image.name>${docker.image.prefix}/${service.server}</docker.image.name>
</properties>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>download-platform-jar</id>
<phase>prepare-package</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<artifactItems>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>${service.server}</artifactId>
<version>${project.version}</version>
<type>jar</type>
<overWrite>true</overWrite>
<destFileName>${platform.jar}</destFileName>
</dependency>
</artifactItems>
<outputDirectory>${docker.build.directory}</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
<configuration>
<images>
<image>
<name>${docker.image.name}</name>
<build>
<dockerFileDir>${docker.build.directory}</dockerFileDir>
<args>
<PLATFORM_JAR>${platform.jar}</PLATFORM_JAR>
</args>
<tags>
<tag>${docker.image.version}</tag>
<tag>latest</tag>
</tags>
</build>
</image>
</images>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
</project>

@@ -0,0 +1,7 @@
FROM red/base-image:2.0.0
ARG PLATFORM_JAR
ENV PLATFORM_JAR ${PLATFORM_JAR}
COPY ["${PLATFORM_JAR}", "/"]
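
Note: a minimal sketch of building this image by hand, assuming the jar name from the image pom above (platform.jar resolves to search-service-server-v1.jar) and that the jar has already been copied into the build context by the maven-dependency-plugin execution; the -t tag here is illustrative, the real name comes from ${docker.image.prefix}:

docker build --build-arg PLATFORM_JAR=search-service-server-v1.jar -t red/search-service-server-v1 .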

search-service-v1/pom.xml
@@ -0,0 +1,99 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>platform-dependency</artifactId>
<groupId>com.iqser.red</groupId>
<version>1.13.0</version>
<relativePath/>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>search-service-v1</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>1.0-SNAPSHOT</version>
<packaging>pom</packaging>
<modules>
<module>search-service-api-v1</module>
<module>search-service-server-v1</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.iqser.red</groupId>
<artifactId>platform-commons-dependency</artifactId>
<version>1.20.0</version>
<scope>import</scope>
<type>pom</type>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.sonarsource.scanner.maven</groupId>
<artifactId>sonar-maven-plugin</artifactId>
<version>3.9.0.2155</version>
</plugin>
<plugin>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-maven</artifactId>
<version>6.3.1</version>
<configuration>
<format>ALL</format>
</configuration>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<id>prepare-agent</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.8</version>
<executions>
<execution>
<id>prepare-agent</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<goals>
<goal>report-aggregate</goal>
</goals>
<phase>verify</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

@@ -1,28 +0,0 @@
plugins {
id("com.iqser.red.service.java-conventions")
id("io.freefair.lombok") version "8.4"
}
description = "search-service-api-v1"
dependencies {
implementation("org.springframework:spring-web:6.0.6")
testImplementation("org.springframework.boot:spring-boot-starter-test:3.1.5")
}
publishing {
publications {
create<MavenPublication>(name) {
from(components["java"])
}
}
repositories {
maven {
url = uri("https://nexus.knecon.com/repository/red-platform-releases/")
credentials {
username = providers.gradleProperty("mavenUser").getOrNull();
password = providers.gradleProperty("mavenPassword").getOrNull();
}
}
}
}

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>search-service-v1</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>1.0-SNAPSHOT</version>
</parent>
<artifactId>search-service-api-v1</artifactId>
<dependencies>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<optional>true</optional>
</dependency>
</dependencies>
</project>

@@ -32,7 +32,7 @@ public class MatchedDocument {
private String fileName;
@Builder.Default
private Map<String, List<String>> highlights = new HashMap<>();
private Map<String, Set<String>> highlights = new HashMap<>();
@Builder.Default
private Set<String> matchedTerms = new HashSet<>();

@@ -14,7 +14,7 @@ import lombok.NoArgsConstructor;
@AllArgsConstructor
public class MatchedSection {
private String sectionNumber;
private int sectionNumber;
private String headline;
@Builder.Default

@@ -3,7 +3,6 @@ package com.iqser.red.service.search.v1.resources;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
@@ -18,7 +17,7 @@ public interface SearchResource {
@ResponseBody
@ResponseStatus(value = HttpStatus.OK)
@PostMapping(value = SEARCH_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
@GetMapping(value = SEARCH_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
SearchResult getDossierStatus(@RequestBody SearchRequest searchRequest);
}

@@ -1,17 +0,0 @@
package com.iqser.red.service.search.v1;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.jupiter.api.Test;
public class IdentityTest {
@Test
public void mockTest() {
int i = 1;
assertThat(i).isEqualTo(1);
}
}

@@ -1,73 +0,0 @@
import org.springframework.boot.gradle.tasks.bundling.BootBuildImage
plugins {
application
id("com.iqser.red.service.java-conventions")
id("org.springframework.boot") version "3.1.5"
id("io.spring.dependency-management") version "1.1.3"
id("org.sonarqube") version "4.4.1.3373"
id("io.freefair.lombok") version "8.4"
}
description = "search-service-server-v1"
configurations {
all {
exclude(group = "commons-logging", module = "commons-logging")
exclude(group = "org.springframework.boot", module = "spring-boot-starter-log4j2")
exclude(group = "com.iqser.red.commons", module = "logging-commons")
}
}
val springBootStarterVersion = "3.1.5"
dependencies {
api("com.knecon.fforesight:tenant-commons:0.30.0")
api("com.knecon.fforesight:tracing-commons:0.5.0")
api("com.knecon.fforesight:lifecycle-commons:0.6.0")
api("com.google.guava:guava:31.1-jre")
api("com.iqser.red.commons:storage-commons:2.45.0")
api(project(":search-service-api-v1"))
api("com.iqser.red.service:persistence-service-internal-api-v1:2.576.0-RED10106.0")
api("com.iqser.red.commons:spring-commons:2.1.0")
api("com.iqser.red.commons:metric-commons:2.1.0")
api("com.iqser.red.commons:jackson-commons:2.1.0")
api("co.elastic.clients:elasticsearch-java:8.6.2")
api("org.opensearch.client:opensearch-rest-client:2.6.0")
api("org.opensearch.client:opensearch-java:2.3.0")
api("jakarta.json:jakarta.json-api:2.1.1")
api("org.springframework.cloud:spring-cloud-starter-openfeign:4.0.4")
api("org.springframework.boot:spring-boot-starter-aop:${springBootStarterVersion}")
api("org.springframework.boot:spring-boot-starter-amqp:${springBootStarterVersion}")
api("net.logstash.logback:logstash-logback-encoder:7.4")
api("ch.qos.logback:logback-classic")
testImplementation("org.springframework.boot:spring-boot-starter-test:${springBootStarterVersion}")
testImplementation("com.iqser.red.commons:test-commons:2.1.0")
testImplementation("org.testcontainers:elasticsearch:1.17.6")
testImplementation("org.opensearch:opensearch-testcontainers:2.0.0")
testImplementation("org.springframework.amqp:spring-rabbit-test:3.0.2")
}
tasks.named<BootBuildImage>("bootBuildImage") {
environment.put("BPE_DELIM_JAVA_TOOL_OPTIONS", " ")
environment.put("BPE_APPEND_JAVA_TOOL_OPTIONS", "-Dfile.encoding=UTF-8")
imageName.set("nexus.knecon.com:5001/red/${project.name}:${project.version}")
if (project.hasProperty("buildbootDockerHostNetwork")) {
network.set("host")
}
docker {
if (project.hasProperty("buildbootDockerHostNetwork")) {
bindHostToBuilder.set(true)
}
verboseLogging.set(true)
publishRegistry {
username.set(providers.gradleProperty("mavenUser").getOrNull())
password.set(providers.gradleProperty("mavenPassword").getOrNull())
email.set(providers.gradleProperty("mavenEmail").getOrNull())
url.set("https://nexus.knecon.com:5001/")
}
}
}

@@ -0,0 +1,169 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>search-service-v1</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>1.0-SNAPSHOT</version>
</parent>
<artifactId>search-service-server-v1</artifactId>
<properties>
<persistence-service.version>1.356.0</persistence-service.version>
</properties>
<dependencies>
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>storage-commons</artifactId>
<version>1.8.5</version>
</dependency>
<dependency>
<groupId>com.iqser.red.service</groupId>
<artifactId>search-service-api-v1</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.iqser.red.service</groupId>
<artifactId>persistence-service-api-v1</artifactId>
<version>${persistence-service.version}</version>
</dependency>
<!-- commons -->
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>spring-commons</artifactId>
</dependency>
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>logging-commons</artifactId>
</dependency>
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>metric-commons</artifactId>
</dependency>
<!-- other external -->
<dependency>
<!-- this dependency is necessary to work with java8 zoned timestamps -->
<groupId>com.iqser.red.commons</groupId>
<artifactId>jackson-commons</artifactId>
</dependency>
<dependency>
<groupId>org.opensearch.client</groupId>
<artifactId>opensearch-rest-high-level-client</artifactId>
<version>1.2.4</version>
</dependency>
<!-- spring -->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-openfeign</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-amqp</artifactId>
<version>2.3.1.RELEASE</version>
</dependency>
<!-- test dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>test-commons</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>elasticsearch</artifactId>
<version>1.16.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.amqp</groupId>
<artifactId>spring-rabbit-test</artifactId>
<version>2.3.1</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<annotationProcessors>
<annotationProcessor>lombok.launch.AnnotationProcessorHider$AnnotationProcessor</annotationProcessor>
<annotationProcessor>com.dslplatform.json.processor.CompiledJsonAnnotationProcessor</annotationProcessor>
</annotationProcessors>
</configuration>
</plugin>
<plugin>
<!-- generate git.properties for exposure in /info -->
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>revision</goal>
</goals>
<configuration>
<generateGitPropertiesFile>true</generateGitPropertiesFile>
<gitDescribe>
<tags>true</tags>
</gitDescribe>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<id>original-jar</id>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<classifier>original</classifier>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<!-- repackages the generated jar into a runnable fat-jar and makes it
executable -->
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
<configuration>
<executable>true</executable>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
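
Note: given the jar plugin's "original" classifier together with the Spring Boot repackage goal above, a build of this module presumably produces two artifacts:

search-service-server-v1-1.0-SNAPSHOT-original.jar   (plain module jar)
search-service-server-v1-1.0-SNAPSHOT.jar            (executable, repackaged fat jar)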

@@ -2,31 +2,27 @@ package com.iqser.red.service.search.v1.server;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.annotation.Import;
import com.iqser.red.commons.spring.DefaultWebMvcConfiguration;
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
import com.iqser.red.storage.commons.StorageAutoConfiguration;
import com.knecon.fforesight.lifecyclecommons.LifecycleAutoconfiguration;
import com.knecon.fforesight.tenantcommons.MultiTenancyAutoConfiguration;
import io.micrometer.core.aop.TimedAspect;
import io.micrometer.core.instrument.MeterRegistry;
@ImportAutoConfiguration({MultiTenancyAutoConfiguration.class, LifecycleAutoconfiguration.class})
@Import({StorageAutoConfiguration.class})
@Import({DefaultWebMvcConfiguration.class})
@EnableFeignClients(basePackageClasses = FileStatusClient.class)
@EnableConfigurationProperties({ElasticsearchSettings.class, SearchServiceSettings.class})
@SpringBootApplication(exclude = {SecurityAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class})
@EnableAspectJAutoProxy
public class Application {
public static void main(String[] args) {
@@ -35,6 +31,14 @@ public class Application {
}
@Bean
@ConditionalOnMissingBean
public ElasticsearchClient elasticsearchClient(ElasticsearchSettings elasticsearchSettings) {
return new ElasticsearchClient(elasticsearchSettings);
}
@Bean
public TimedAspect timedAspect(MeterRegistry registry) {

@@ -2,7 +2,7 @@ package com.iqser.red.service.search.v1.server.client;
import org.springframework.cloud.openfeign.FeignClient;
import com.iqser.red.service.persistence.service.v1.api.internal.resources.DossierResource;
import com.iqser.red.service.persistence.service.v1.api.resources.DossierResource;
@FeignClient(name = "DossierResource", url = "${persistence-service.url}")
public interface DossierClient extends DossierResource {

@@ -0,0 +1,71 @@
package com.iqser.red.service.search.v1.server.client;
import java.io.IOException;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestClientBuilder;
import org.opensearch.client.RestHighLevelClient;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import lombok.RequiredArgsConstructor;
import lombok.experimental.Delegate;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class ElasticsearchClient {
// Lower timeouts should be set per request.
private static final int ABSURD_HIGH_TIMEOUT = 90_000_000;
private final ElasticsearchSettings settings;
@Delegate
private RestHighLevelClient client;
@PostConstruct
public void init() {
HttpHost[] httpHost = settings.getHosts()
.stream()
.map(host -> new HttpHost(host, settings.getPort(), settings.getScheme()))
.collect(Collectors.toList())
.toArray(new HttpHost[settings.getHosts().size()]);
RestClientBuilder builder = RestClient.builder(httpHost)
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT).setSocketTimeout(ABSURD_HIGH_TIMEOUT));
if (settings.getUsername() != null && !settings.getUsername().isEmpty()) {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(settings.getUsername(), settings.getPassword()));
builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
}
client = new RestHighLevelClient(builder);
}
@PreDestroy
public void shutdown() {
try {
client.close();
} catch (IOException e) {
log.error(e.getMessage(), e);
}
}
}
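
Note: because the class exposes RestHighLevelClient through Lombok's @Delegate, callers can invoke any client method directly on the injected bean. A minimal usage sketch (the index name "documents" is a hypothetical stand-in):

import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.client.RequestOptions;

// search(...) is forwarded to the delegated RestHighLevelClient
SearchResponse response = elasticsearchClient.search(new SearchRequest("documents"), RequestOptions.DEFAULT);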

@@ -2,7 +2,7 @@ package com.iqser.red.service.search.v1.server.client;
import org.springframework.cloud.openfeign.FeignClient;
import com.iqser.red.service.persistence.service.v1.api.internal.resources.StatusResource;
import com.iqser.red.service.persistence.service.v1.api.resources.StatusResource;
@FeignClient(name = "StatusResource", url = "${persistence-service.url}")
public interface FileStatusClient extends StatusResource {

@@ -2,7 +2,7 @@ package com.iqser.red.service.search.v1.server.client;
import org.springframework.cloud.openfeign.FeignClient;
import com.iqser.red.service.persistence.service.v1.api.internal.resources.FileStatusProcessingUpdateResource;
import com.iqser.red.service.persistence.service.v1.api.resources.FileStatusProcessingUpdateResource;
@FeignClient(name = "FileStatusProcessingUpdateResource", url = "${persistence-service.url}")
public interface FileStatusProcessingUpdateClient extends FileStatusProcessingUpdateResource {

@@ -2,9 +2,9 @@ package com.iqser.red.service.search.v1.server.client;
import org.springframework.cloud.openfeign.FeignClient;
import com.iqser.red.service.persistence.service.v1.api.internal.resources.IndexInformationResource;
import com.iqser.red.service.persistence.service.v1.api.resources.IndexInformationResource;
@FeignClient(name = "IndexInformationResource", url = "${persistence-service.url}")
public interface IndexInformationClient extends IndexInformationResource {
}
}

@@ -1,119 +0,0 @@
package com.iqser.red.service.search.v1.server.configuration;
import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.BindingBuilder;
import org.springframework.amqp.core.DirectExchange;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.QueueBuilder;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import lombok.RequiredArgsConstructor;
@Configuration
@RequiredArgsConstructor
public class MessagingConfiguration {
public static final String INDEXING_REQUEST_QUEUE_PREFIX = "indexing_request";
public static final String INDEXING_REQUEST_EXCHANGE = "indexing_request_exchange";
public static final String INDEXING_DLQ = "indexing_error";
public static final String DELETE_FROM_INDEX_REQUEST_QUEUE_PREFIX = "delete_from_index_request";
public static final String DELETE_FROM_INDEX_REQUEST_EXCHANGE = "delete_from_index_request_exchange";
public static final String DELETE_FROM_INDEX_DLQ = "delete_from_index_error";
public static final String X_ERROR_INFO_HEADER = "x-error-message";
public static final String X_ERROR_INFO_TIMESTAMP_HEADER = "x-error-message-timestamp";
@Value("${fforesight.multitenancy.tenant-delete-queue:search-service-tenant-delete}")
private String tenantDeleteEventQueueName;
@Value("${fforesight.multitenancy.tenant-delete-dlq:search-service-tenant-delete-error}")
private String tenantDeleteDLQName;
@Value("${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
private String tenantUpdatedEventQueueName;
@Value("${fforesight.multitenancy.tenant-updated-dlq:search-service-tenant-updated-error}")
private String tenantUpdatedDLQName;
@Bean
public DirectExchange indexingRequestExchange() {
return new DirectExchange(INDEXING_REQUEST_EXCHANGE);
}
@Bean
public Queue indexingDLQ() {
return QueueBuilder.durable(INDEXING_DLQ).build();
}
@Bean
public DirectExchange deleteFromIndexRequestExchange() {
return new DirectExchange(DELETE_FROM_INDEX_REQUEST_EXCHANGE);
}
@Bean
public Queue deleteFromIndexDLQ() {
return QueueBuilder.durable(DELETE_FROM_INDEX_DLQ).build();
}
@Bean
public Binding tenantExchangeDeleteBinding(@Qualifier("tenantUserManagementTenantDeleteQueue") Queue tenantUserManagementTenantDeleteQueue,
@Qualifier("tenantExchange") TopicExchange tenantExchange) {
return BindingBuilder.bind(tenantUserManagementTenantDeleteQueue).to(tenantExchange).with("tenant.delete");
}
@Bean("tenantUserManagementTenantDeleteQueue")
public Queue tenantDeleteQueue() {
return QueueBuilder.durable(this.tenantDeleteEventQueueName)
.withArgument("x-dead-letter-exchange", "")
.withArgument("x-dead-letter-routing-key", this.tenantDeleteDLQName)
.build();
}
@Bean
public Queue tenantDeleteDLQ() {
return QueueBuilder.durable(this.tenantDeleteDLQName).build();
}
@Bean
public Binding tenantExchangeUpdatedBinding(@Qualifier("tenantUserManagementTenantUpdatedQueue") Queue tenantUserManagementTenantUpdatedQueue,
@Qualifier("tenantExchange") TopicExchange tenantExchange) {
return BindingBuilder.bind(tenantUserManagementTenantUpdatedQueue).to(tenantExchange).with("tenant.updated");
}
@Bean("tenantUserManagementTenantUpdatedQueue")
public Queue tenantUpdatedQueue() {
return QueueBuilder.durable(this.tenantUpdatedEventQueueName)
.withArgument("x-dead-letter-exchange", "")
.withArgument("x-dead-letter-routing-key", this.tenantUpdatedDLQName)
.build();
}
@Bean
public Queue tenantUpdatedDLQ() {
return QueueBuilder.durable(this.tenantUpdatedDLQName).build();
}
}

@@ -1,11 +0,0 @@
package com.iqser.red.service.search.v1.server.configuration;
import org.springframework.context.annotation.Configuration;
import com.knecon.fforesight.tenantcommons.queue.TenantMessagingConfiguration;
@Configuration
public class TenantMessagingConfigurationImpl extends TenantMessagingConfiguration {
}

@@ -2,31 +2,12 @@ package com.iqser.red.service.search.v1.server.exception;
public class IndexException extends RuntimeException {
private static final String INDEX_EXISTS_ERROR = "Unable to check, if index exists";
private static final String DOCUMENT_INDEX_ERROR = "Error during indexing document with id '%s'";
private static final String DOCUMENT_UPDATE_ERROR = "Error during updating document with id '%s'";
private static final String DOCUMENT_DELETE_ERROR = "Error during deleting document with id '%s'";
private static final String FAILED_TO_SEARCH = "Error during search";
public static final String INDEX_EXISTS_ERROR = "Unable to check, if index exists";
public static final String CONTENT_TO_JSON_ERROR = "Could not convert document with id '%s' to JSON!";
public static final String DOCUMENT_INDEX_ERROR = "Error during indexing document with id '%s'";
public static final String DOCUMENT_DELETE_ERROR = "Error during deleting document with id '%s'";
public static final String FAILED_TO_SEARCH = "Error during search";
public static IndexException indexExists(Throwable cause){
return new IndexException(INDEX_EXISTS_ERROR, cause);
}
public static IndexException documentIndexError(String fileId, Throwable cause){
return new IndexException(String.format(DOCUMENT_INDEX_ERROR, fileId), cause);
}
public static IndexException documentUpdateError(String fileId, Throwable cause){
return new IndexException(String.format(DOCUMENT_UPDATE_ERROR, fileId), cause);
}
public static IndexException documentDeleteError(String fileId, Throwable cause){
return new IndexException(String.format(DOCUMENT_DELETE_ERROR, fileId), cause);
}
public static IndexException searchFailed(Throwable cause){
return new IndexException(FAILED_TO_SEARCH, cause);
}
public IndexException(String message) {

@@ -11,8 +11,6 @@ import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@@ -20,14 +18,12 @@ import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@SuppressWarnings("PMD")
public class MigrationStarterService {
private final ApplicationContext ctx;
private final IndexInformationService indexInformationService;
private final IndexingMessageReceiver indexingMessageReceiver;
private final SearchServiceSettings settings;
private final TenantsClient tenantsClient;
@EventListener(ApplicationReadyEvent.class)
@@ -35,15 +31,12 @@ public class MigrationStarterService {
// This can only run in the post-upgrade hook, because otherwise the old service is still running.
if (settings.isMigrateOnly()) {
tenantsClient.getTenants().forEach(tenant -> {
TenantContext.setTenantId(tenant.getTenantId());
if (indexInformationService.hasIndexChanged()) {
log.info("Index has changed and will be closed, dropped, recreated and all files will be indexed");
indexingMessageReceiver.receiveIndexingRequest(IndexMessage.builder().messageType(IndexMessageType.DROP).build());
}
});
if (indexInformationService.hasIndexChanged()) {
log.info("Index has changed and will be closed, dropped, recreated and all files will be indexed");
indexingMessageReceiver.receiveIndexingRequest(IndexMessage.builder().messageType(IndexMessageType.DROP).build());
}
System.exit(SpringApplication.exit(ctx, () -> 0));
}
}
}
}

@@ -1,21 +0,0 @@
package com.iqser.red.service.search.v1.server.model;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
@Data
@Builder
@AllArgsConstructor
@EqualsAndHashCode(onlyExplicitlyIncluded = true)
public class Connection {
@EqualsAndHashCode.Include
private String hosts;
private SearchConnection searchConnection;
}

@@ -1,8 +1,8 @@
package com.iqser.red.service.search.v1.server.model;
import java.io.Serializable;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import lombok.AccessLevel;
@@ -23,7 +23,7 @@ public class IndexDocument implements Serializable {
private String fileId;
private String filename;
private Date date;
private OffsetDateTime date;
private String assignee;
private boolean dossierDeleted;
private boolean dossierArchived;

@@ -16,7 +16,7 @@ import lombok.NoArgsConstructor;
@SuppressWarnings("serial")
public class IndexSection implements Serializable {
private String sectionNumber;
private int sectionNumber;
private String text;
private Set<Integer> pages;
private String headline;

@@ -14,7 +14,7 @@ import lombok.NoArgsConstructor;
@AllArgsConstructor
public class SectionText {
private String sectionNumber;
private int sectionNumber;
private String headline;
private String text;

@@ -3,11 +3,14 @@ package com.iqser.red.service.search.v1.server.model;
import java.util.ArrayList;
import java.util.List;
import com.dslplatform.json.CompiledJson;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@CompiledJson
@NoArgsConstructor
@AllArgsConstructor
public class Text {

@@ -1,38 +0,0 @@
package com.iqser.red.service.search.v1.server.queue;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class DeleteTenantMessageReceiver {
private final IndexDeleteService indexDeleteService;
@Value("${fforesight.multitenancy.tenant-delete-queue:tenant-delete-queue}")
private String tenantDeleteQueue;
@PostConstruct
public void postConstruct() {
log.info("Listener for tenant-delete started for queue: {}", this.tenantDeleteQueue);
}
@RabbitListener(queues = "${fforesight.multitenancy.tenant-delete-queue:tenant-delete-queue}")
public void deleteTenant(TenantResponse tenant) {
indexDeleteService.dropIndex(tenant.getSearchConnection());
}
}

@@ -1,14 +1,10 @@
package com.iqser.red.service.search.v1.server.queue;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_DLQ;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_REQUEST_EXCHANGE;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.X_ERROR_INFO_HEADER;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.X_ERROR_INFO_TIMESTAMP_HEADER;
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_QUEUE;
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_DQL;
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_QUEUE;
import java.io.IOException;
import java.time.OffsetDateTime;
import java.time.temporal.ChronoUnit;
import java.util.List;
import org.springframework.amqp.AmqpRejectAndDontRequeueException;
@@ -18,10 +14,10 @@ import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileErrorInfo;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.Dossier;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.server.client.DossierClient;
@@ -31,12 +27,12 @@ import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.service.IndexCreatorService;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
import com.iqser.red.service.search.v1.server.service.TextStorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@@ -46,27 +42,23 @@ import lombok.extern.slf4j.Slf4j;
@RequiredArgsConstructor
public class IndexingMessageReceiver {
public static final String INDEXING_LISTENER_ID = "indexing-listener";
public static final String DELETE_FROM_INDEX_LISTENER_ID = "delete-from-index-listener";
private final ObjectMapper objectMapper;
private final TextStorageService textStorageService;
private final DocumentIndexService documentIndexService;
private final FileStatusClient fileStatusClient;
private final DossierClient dossierClient;
private final FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
private final RabbitTemplate rabbitTemplate;
private final DocumentDeleteService documentDeleteService;
private final DocumentUpdateService documentUpdateService;
private final DocumentIndexService documentIndexService;
private final IndexCreatorService indexCreatorService;
private final RabbitTemplate rabbitTemplate;
private final IndexDeleteService indexDeleteService;
private final IndexInformationService indexInformationService;
private final IndexDocumentConverterService indexDocumentConverterService;
@SneakyThrows
@RabbitHandler
@RabbitListener(id = INDEXING_LISTENER_ID)
@RabbitListener(queues = INDEXING_QUEUE)
public void receiveIndexingRequest(Message message) {
var indexRequest = objectMapper.readValue(message.getBody(), IndexMessage.class);
@@ -74,18 +66,11 @@ public class IndexingMessageReceiver {
// This prevents endless retries and OOM errors.
if (message.getMessageProperties().isRedelivered()) {
throw new AmqpRejectAndDontRequeueException(String.format("Error during last processing of request with dossierId: %s and fileId: %s, do not retry.",
indexRequest.getDossierId(),
indexRequest.getFileId()));
indexRequest.getDossierId(),
indexRequest.getFileId()));
}
try {
receiveIndexingRequest(indexRequest);
} catch (Exception e) {
log.warn("An exception occurred in processing the indexing request stage: ", e);
message.getMessageProperties().getHeaders().put(X_ERROR_INFO_HEADER, e.getMessage());
message.getMessageProperties().getHeaders().put(X_ERROR_INFO_TIMESTAMP_HEADER, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
throw e;
}
receiveIndexingRequest(indexRequest);
}
@@ -93,36 +78,35 @@
log.info("Processing indexing request: {}", indexRequest);
FileModel fileStatus;
Dossier dossier;
switch (indexRequest.getMessageType()) {
case INSERT:
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
var fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
var dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
indexFile(dossier, fileStatus);
break;
case UPDATE:
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
if (documentUpdateService.documentExists(indexRequest.getFileId())) {
var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
dossier.getSoftDeletedTime() != null,
dossier.getArchivedTime() != null,
fileStatus.getWorkflowStatus().name(),
fileStatus.getFileAttributes());
documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
log.info("Successfully updated {}", indexRequest);
} else {
indexFile(dossier, fileStatus);
}
documentUpdateService.updateDocument(indexRequest.getFileId(),
fileStatus.getAssignee(),
dossier.getSoftDeletedTime() != null,
dossier.getArchivedTime() != null,
fileStatus.getWorkflowStatus().name(),
fileStatus.getFileAttributes());
log.info("Successfully updated {}", indexRequest);
break;
case DROP:
indexDeleteService.recreateIndex();
indexDeleteService.closeIndex();
indexDeleteService.dropIndex();
indexCreatorService.createIndex();
addAllDocumentsToIndexQueue();
indexInformationService.updateIndexInformation();
try {
indexInformationService.updateIndexInformation();
} catch (Exception e) {
log.error("Could not update index information", e);
}
break;
default:
@@ -133,56 +117,34 @@
@RabbitHandler
@RabbitListener(queues = INDEXING_DLQ)
public void receiveIndexingRequestDQL(Message in) throws IOException {
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
String errorLog = "Failed to process indexing request:";
log.info(errorLog + ": {}", indexRequest);
String errorMessage = errorLog + in.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
OffsetDateTime timestamp = in.getMessageProperties().getHeader(X_ERROR_INFO_TIMESTAMP_HEADER);
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(),
indexRequest.getFileId(),
new FileErrorInfo(errorMessage, INDEXING_DLQ, "search-service", timestamp));
@RabbitListener(queues = INDEXING_DQL)
public void receiveIndexingRequestDQL(String in) throws JsonProcessingException {
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
log.info("Failed to process indexing request: {}", indexRequest);
}
@RabbitHandler
@RabbitListener(id = DELETE_FROM_INDEX_LISTENER_ID)
public void receiveDeleteDocumentRequest(Message in) throws IOException {
@RabbitListener(queues = DELETE_FROM_INDEX_QUEUE)
public void receiveDeleteDocumentRequest(String in) throws JsonProcessingException {
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
log.info("Processing delete document request: {}", indexRequest);
try {
documentDeleteService.deleteDocument(indexRequest.getFileId());
log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
} catch (Exception e) {
log.warn("An exception occurred in processing delete document stage: {}", e.getMessage());
in.getMessageProperties().getHeaders().put(X_ERROR_INFO_HEADER, e.getMessage());
in.getMessageProperties().getHeaders().put(X_ERROR_INFO_TIMESTAMP_HEADER, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
throw e;
}
documentDeleteService.deleteDocument(indexRequest.getFileId());
log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
}
@RabbitHandler
@RabbitListener(queues = DELETE_FROM_INDEX_DLQ)
public void receiveDeleteDocumentRequestDLQ(Message in) throws IOException {
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
String errorLog = "Failed to process delete from index request ";
log.info(errorLog + ": {}", indexRequest);
String errorMessage = errorLog + in.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
OffsetDateTime timestamp = in.getMessageProperties().getHeader(X_ERROR_INFO_TIMESTAMP_HEADER);
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(),
indexRequest.getFileId(),
new FileErrorInfo(errorMessage, INDEXING_DLQ, "search-service", timestamp));
public void receiveDeleteDocumentRequestDLQ(String in) throws JsonProcessingException {
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
log.info("Failed to process delete from index request: {}", indexRequest);
}
@@ -190,19 +152,16 @@
fileStatusProcessingUpdateClient.indexing(dossier.getId(), file.getId());
Text text = textStorageService.getText(dossier.getId(), file.getId());
var indexDocument = indexDocumentConverterService.convert(dossier.getDossierTemplateId(),
dossier.getId(),
file.getId(),
file.getFilename(),
text,
file.getAssignee(),
dossier.getSoftDeletedTime() != null,
dossier.getArchivedTime() != null,
file.getWorkflowStatus(),
file.getFileAttributes());
documentIndexService.indexDocument(indexDocument);
documentIndexService.indexDocument(dossier.getDossierTemplateId(),
dossier.getId(),
file.getId(),
file.getFilename(),
text,
file.getAssignee(),
dossier.getSoftDeletedTime() != null,
dossier.getArchivedTime() != null,
file.getWorkflowStatus(),
file.getFileAttributes());
fileStatusProcessingUpdateClient.indexingSuccessful(dossier.getId(), file.getId());
log.info("Successfully indexed dossier {} file {}", dossier.getId(), file.getId());
}
@@ -222,14 +181,17 @@
private void addFilesToIndexingQueue(String dossierId, List<FileModel> files) {
for (FileModel file : files) {
log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
rabbitTemplate.convertAndSend(INDEXING_REQUEST_EXCHANGE,
TenantContext.getTenantId(),
IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build(),
message -> {
message.getMessageProperties().setPriority(99);
return message;
});
try {
log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
rabbitTemplate.convertAndSend(INDEXING_QUEUE,
objectMapper.writeValueAsString(IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build()),
message -> {
message.getMessageProperties().setPriority(99);
return message;
});
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
}

@@ -0,0 +1,52 @@
package com.iqser.red.service.search.v1.server.queue;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.QueueBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import lombok.RequiredArgsConstructor;
@Configuration
@RequiredArgsConstructor
public class MessagingConfiguration {
public static final String INDEXING_QUEUE = "indexingQueue";
public static final String INDEXING_DQL = "indexingDQL";
public static final String DELETE_FROM_INDEX_QUEUE = "deleteFromIndexQueue";
public static final String DELETE_FROM_INDEX_DLQ = "deleteFromIndexDLQ";
@Bean
public Queue indexingQueue() {
return QueueBuilder.durable(INDEXING_QUEUE).withArgument("x-dead-letter-exchange", "").withArgument("x-dead-letter-routing-key", INDEXING_DQL).maxPriority(2).build();
}
@Bean
public Queue indexingDeadLetterQueue() {
return QueueBuilder.durable(INDEXING_DQL).build();
}
@Bean
public Queue deleteFromIndexQueue() {
return QueueBuilder.durable(DELETE_FROM_INDEX_QUEUE)
.withArgument("x-dead-letter-exchange", "")
.withArgument("x-dead-letter-routing-key", DELETE_FROM_INDEX_DLQ)
.maxPriority(2)
.build();
}
@Bean
public Queue deleteFromIndexDLQ() {
return QueueBuilder.durable(DELETE_FROM_INDEX_DLQ).build();
}
}
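
Note: with x-dead-letter-exchange set to "" (the default exchange) and the DLQ name as routing key, a message rejected without requeue is dead-lettered straight onto the corresponding DLQ. A minimal listener sketch mirroring the AmqpRejectAndDontRequeueException already used in IndexingMessageReceiver:

import org.springframework.amqp.AmqpRejectAndDontRequeueException;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.rabbit.annotation.RabbitListener;

@RabbitListener(queues = MessagingConfiguration.INDEXING_QUEUE)
public void receive(Message message) {
    if (message.getMessageProperties().isRedelivered()) {
        // rejected without requeue -> broker routes it to the "indexingDQL" queue
        throw new AmqpRejectAndDontRequeueException("giving up after first redelivery");
    }
    // ... normal processing
}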

@@ -1,74 +0,0 @@
package com.iqser.red.service.search.v1.server.queue;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.*;
import java.util.Map;
import java.util.Set;
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Service;
import com.knecon.fforesight.tenantcommons.TenantProvider;
import com.knecon.fforesight.tenantcommons.model.TenantCreatedEvent;
import com.knecon.fforesight.tenantcommons.model.TenantQueueConfiguration;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
import com.knecon.fforesight.tenantcommons.queue.RabbitQueueFromExchangeService;
import com.knecon.fforesight.tenantcommons.queue.TenantExchangeMessageReceiver;
@Service
public class TenantExchangeMessageReceiverImpl extends TenantExchangeMessageReceiver {
public TenantExchangeMessageReceiverImpl(RabbitQueueFromExchangeService rabbitQueueService, TenantProvider tenantProvider) {
super(rabbitQueueService, tenantProvider);
}
@Override
protected Set<TenantQueueConfiguration> getTenantQueueConfigs() {
return Set.of(TenantQueueConfiguration.builder()
.listenerId(IndexingMessageReceiver.INDEXING_LISTENER_ID)
.exchangeName(INDEXING_REQUEST_EXCHANGE)
.queuePrefix(INDEXING_REQUEST_QUEUE_PREFIX)
.dlqName(INDEXING_DLQ)
.arguments(Map.of("x-max-priority", 2))
.build(),
TenantQueueConfiguration.builder()
.listenerId(IndexingMessageReceiver.DELETE_FROM_INDEX_LISTENER_ID)
.exchangeName(DELETE_FROM_INDEX_REQUEST_EXCHANGE)
.queuePrefix(DELETE_FROM_INDEX_REQUEST_QUEUE_PREFIX)
.dlqName(DELETE_FROM_INDEX_DLQ)
.arguments(Map.of("x-max-priority", 2))
.build());
}
@EventListener(ApplicationReadyEvent.class)
public void onApplicationReady() {
System.out.println("application ready invoked");
super.initializeQueues();
}
@RabbitHandler
@RabbitListener(queues = "#{tenantMessagingConfigurationImpl.getTenantCreatedQueueName()}")
public void reactToTenantCreation(TenantCreatedEvent tenantCreatedEvent) {
super.reactToTenantCreation(tenantCreatedEvent);
}
@RabbitHandler
@RabbitListener(queues = "#{tenantMessagingConfigurationImpl.getTenantDeletedQueueName()}")
public void reactToTenantDeletion(TenantResponse tenantResponse) {
super.reactToTenantDeletion(tenantResponse);
}
}

@@ -1,48 +0,0 @@
package com.iqser.red.service.search.v1.server.queue;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class UpdatedTenantMessageReceiver {
private final IndexQueryService indexQueryService;
private final IndexDeleteService indexDeleteService;
@Value("${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
private String tenantUpdatedQueue;
@PostConstruct
public void postConstruct() {
log.info("Listener for tenant updated events started for queue: {}", this.tenantUpdatedQueue);
}
@RabbitListener(queues = "${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
public void updateTenant(TenantResponse tenant) {
String numberOfReplicas = tenant.getSearchConnection().getNumberOfReplicas();
String numberOfShards = tenant.getSearchConnection().getNumberOfShards();
IndexQueryResult queryResult = indexQueryService.getIndexQueryResult(tenant.getSearchConnection());
if (queryResult.isIndexFound() && (!numberOfReplicas.equals(queryResult.getNumberOfReplicas()) || !numberOfShards.equals(queryResult.getNumberOfShards()))) {
log.info("Number of shards or replicas were changed during tenant update, indices will be recreated");
indexDeleteService.recreateIndex(tenant.getSearchConnection());
}
}
}

View File

@ -1,8 +1,38 @@
package com.iqser.red.service.search.v1.server.service;
public interface DocumentDeleteService {
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
void deleteDocument(String fileId);
import java.io.IOException;
import org.opensearch.action.delete.DeleteRequest;
import org.opensearch.client.RequestOptions;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import lombok.RequiredArgsConstructor;
@Service
@RequiredArgsConstructor
public class DocumentDeleteService {
private final ElasticsearchClient client;
private final ElasticsearchSettings settings;
public void deleteDocument(String fileId) {
DeleteRequest request = new DeleteRequest(INDEX_NAME).id(fileId).setRefreshPolicy(settings.getRefreshPolicy());
try {
client.delete(request, RequestOptions.DEFAULT);
} catch (IOException e) {
throw new IndexException(String.format(IndexException.DOCUMENT_DELETE_ERROR, fileId), e);
}
}
}

View File

@ -1,10 +1,123 @@
package com.iqser.red.service.search.v1.server.service;
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
import java.io.IOException;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.opensearch.action.index.IndexRequest;
import org.opensearch.client.RequestOptions;
import org.opensearch.common.xcontent.XContentType;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
import com.iqser.red.service.search.v1.server.model.IndexSection;
import com.iqser.red.service.search.v1.server.model.SectionArea;
import com.iqser.red.service.search.v1.server.model.SectionText;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
public interface DocumentIndexService {
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class DocumentIndexService {
private final ElasticsearchClient client;
private final ElasticsearchSettings settings;
private final ObjectMapper objectMapper;
void indexDocument(IndexDocument indexDocument);
@Timed("redactmanager_indexDocument")
public void indexDocument(String dossierTemplateId,
String dossierId,
String fileId,
String filename,
Text text,
String assignee,
boolean deleted,
boolean archived,
WorkflowStatus workflowStatus,
Map<String, String> fileAttributes) {
IndexRequest indexRequest = new IndexRequest(INDEX_NAME).id(fileId);
indexRequest.setRefreshPolicy(settings.getRefreshPolicy());
indexRequest.source(toJson(convert(dossierTemplateId, dossierId, fileId, filename, text, assignee, deleted, archived, workflowStatus, fileAttributes)), XContentType.JSON);
try {
client.index(indexRequest, RequestOptions.DEFAULT);
} catch (IOException e) {
throw new IndexException(String.format(IndexException.DOCUMENT_INDEX_ERROR, fileId), e);
}
}
public String toJson(IndexDocument indexDocument) {
try {
return objectMapper.writeValueAsString(indexDocument);
} catch (JsonProcessingException e) {
throw new IndexException(String.format(IndexException.CONTENT_TO_JSON_ERROR, indexDocument.getFileId()), e);
}
}
private IndexDocument convert(String dossierTemplateId,
String dossierId,
String fileId,
String filename,
Text text,
String assignee,
boolean deleted,
boolean archived,
WorkflowStatus workflowStatus,
Map<String, String> fileAttributes) {
return IndexDocument.builder()
.dossierTemplateId(dossierTemplateId)
.dossierId(dossierId)
.fileId(fileId)
.filename(filename)
.sections(text.getSectionTexts().stream().map(this::convert).collect(Collectors.toList()))
.date(OffsetDateTime.now())
.fileAttributes(convertFileAttributes(fileAttributes))
.assignee(assignee)
.dossierDeleted(deleted)
.dossierArchived(archived)
.workflowStatus(workflowStatus.name())
.build();
}
private List<IndexFileAttribute> convertFileAttributes(Map<String, String> fileAttributes) {
List<IndexFileAttribute> converted = new ArrayList<>();
if (fileAttributes != null && !fileAttributes.isEmpty()) {
fileAttributes.forEach((key, value) -> converted.add(new IndexFileAttribute(key, value)));
}
return converted;
}
private IndexSection convert(SectionText sectionText) {
return IndexSection.builder()
.sectionNumber(sectionText.getSectionNumber())
.text(sectionText.getText())
.pages(sectionText.getSectionAreas().stream().map(SectionArea::getPage).collect(Collectors.toSet()))
.headline(sectionText.getHeadline())
.build();
}
}
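
To make the serialized shape concrete, a small self-contained sketch of producing a comparable JSON payload with Jackson; the record types are simplified stand-ins for IndexDocument/IndexSection, and it assumes the jackson-datatype-jsr310 module is on the classpath for OffsetDateTime support:

import java.time.OffsetDateTime;
import java.util.List;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;

public class IndexPayloadSketch {

    // Simplified stand-ins; the real models live in com.iqser.red.service.search.v1.server.model.
    record Section(int sectionNumber, String text, List<Integer> pages, String headline) {}
    record Document(String fileId, String filename, List<Section> sections, OffsetDateTime date) {}

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper()
                .registerModule(new JavaTimeModule()) // required to serialize OffsetDateTime
                .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS); // ISO-8601 instead of epoch numbers
        Document doc = new Document("file-1", "contract.pdf",
                List.of(new Section(1, "Lorem ipsum", List.of(1, 2), "Intro")),
                OffsetDateTime.now());
        System.out.println(mapper.writeValueAsString(doc));
    }
}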

View File

@ -1,10 +1,64 @@
package com.iqser.red.service.search.v1.server.service;
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.opensearch.action.update.UpdateRequest;
import org.opensearch.action.update.UpdateResponse;
import org.opensearch.client.RequestOptions;
import org.opensearch.common.xcontent.XContentType;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
public interface DocumentUpdateService {
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate);
boolean documentExists(String fileId);
@Service
@RequiredArgsConstructor
public class DocumentUpdateService {
}
private final ObjectMapper objectMapper;
private final ElasticsearchClient client;
@SneakyThrows
@Timed("redactmanager_updateDocument")
public void updateDocument(String fileId, String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {
var indexUpdateRequest = IndexDocumentUpdate.builder()
.assignee(assignee)
.dossierDeleted(deleted)
.dossierArchived(archived)
.workflowStatus(workflowStatus)
.fileAttributes(convertFileAttributes(fileAttributes))
.build();
UpdateRequest request = new UpdateRequest(INDEX_NAME, fileId);
request.doc(objectMapper.writeValueAsString(indexUpdateRequest), XContentType.JSON);
UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT);
if (updateResponse.status().getStatus() < 200 || updateResponse.status().getStatus() > 204) {
throw new IllegalStateException("Document could not be updated. HTTP error " + updateResponse.status().getStatus());
}
}
private List<IndexFileAttribute> convertFileAttributes(Map<String, String> fileAttributes) {
List<IndexFileAttribute> converted = new ArrayList<>();
if (fileAttributes != null && !fileAttributes.isEmpty()) {
fileAttributes.forEach((key, value) -> converted.add(new IndexFileAttribute(key, value)));
}
return converted;
}
}
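
One behaviour worth noting with this partial update: UpdateRequest.doc(...) merges exactly the JSON it receives, so any field serialized as null would overwrite the indexed value. A common guard, assuming IndexDocumentUpdate does not already declare it, is to skip null fields during serialization:

import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;

public class UpdateMapperSketch {

    public static void main(String[] args) throws Exception {
        // With NON_NULL inclusion, fields left unset on the update object are omitted from
        // the partial-update document instead of nulling out what is already indexed.
        ObjectMapper mapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL);
        record Update(String assignee, String workflowStatus) {}
        System.out.println(mapper.writeValueAsString(new Update(null, "IN_PROGRESS")));
        // prints: {"workflowStatus":"IN_PROGRESS"}
    }
}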

View File

@ -0,0 +1,75 @@
package com.iqser.red.service.search.v1.server.service;
import static com.iqser.red.service.search.v1.server.exception.IndexException.INDEX_EXISTS_ERROR;
import java.io.IOException;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.indices.CreateIndexRequest;
import org.opensearch.client.indices.CreateIndexResponse;
import org.opensearch.client.indices.GetIndexRequest;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentType;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.ResourceLoader;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
public class IndexCreatorService {
public static final String INDEX_NAME = "redaction";
private final ElasticsearchClient client;
private final ElasticsearchSettings settings;
public IndexCreatorService(ElasticsearchClient client, ElasticsearchSettings settings) {
this.client = client;
this.settings = settings;
if (!indexExists()) {
createIndex();
}
}
public void createIndex() {
String indexMapping = ResourceLoader.load("index/mapping.json");
String indexSettings = ResourceLoader.load("index/settings.json");
Settings.Builder settingsBuilder = Settings.builder()
.loadFromSource(indexSettings, XContentType.JSON)
.put("number_of_shards", settings.getNumberOfShards())
.put("number_of_replicas", settings.getNumberOfReplicas())
.put("index.mapping.nested_objects.limit", settings.getNumberOfNestedObjectLimit());
CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX_NAME).settings(settingsBuilder.build()).mapping(indexMapping, XContentType.JSON);
try {
CreateIndexResponse response = client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
log.info("Successfully created index: {}", response.index());
} catch (IOException e) {
log.error("Failed to create index.", e);
}
}
private boolean indexExists() {
GetIndexRequest getIndexRequest = new GetIndexRequest(INDEX_NAME);
try {
return client.indices().exists(getIndexRequest, RequestOptions.DEFAULT);
} catch (IOException e) {
throw new IndexException(INDEX_EXISTS_ERROR, e);
}
}
}
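
ResourceLoader.load(...) is a repository utility whose source is not shown in this diff. A plausible equivalent, included only to make the index-creation flow self-explanatory (UTF-8 classpath resources assumed):

import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;

public final class ClasspathJsonSketch {

    // Minimal stand-in for the repo's ResourceLoader.load(); reads a classpath resource as UTF-8 text.
    public static String load(String path) {
        try (InputStream is = ClasspathJsonSketch.class.getClassLoader().getResourceAsStream(path)) {
            if (is == null) {
                throw new IllegalArgumentException("Resource not found on classpath: " + path);
            }
            return new String(is.readAllBytes(), StandardCharsets.UTF_8);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}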

View File

@ -1,19 +1,61 @@
package com.iqser.red.service.search.v1.server.service;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
public interface IndexDeleteService {
import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
import org.opensearch.action.support.master.AcknowledgedResponse;
import org.opensearch.client.RequestOptions;
import org.opensearch.client.indices.CloseIndexRequest;
import org.opensearch.common.unit.TimeValue;
import org.springframework.stereotype.Service;
void recreateIndex();
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
import com.iqser.red.service.search.v1.server.exception.IndexException;
void recreateIndex(SearchConnection searchConnection);
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class IndexDeleteService {
private final ElasticsearchClient client;
void closeIndex();
@SneakyThrows
public void closeIndex() {
log.info("Will close index");
CloseIndexRequest request = new CloseIndexRequest(INDEX_NAME);
request.setTimeout(TimeValue.timeValueMinutes(2));
AcknowledgedResponse closeIndexResponse = client.indices().close(request, RequestOptions.DEFAULT);
if (closeIndexResponse.isAcknowledged()) {
log.info("Index is closed");
} else {
throw new IndexException("Error while closing index");
}
}
void dropIndex();
@SneakyThrows
public void dropIndex() {
void dropIndex(SearchConnection searchConnection);
log.info("Will drop index");
DeleteIndexRequest request = new DeleteIndexRequest(INDEX_NAME);
request.timeout(TimeValue.timeValueMinutes(2));
request.timeout("2m");
AcknowledgedResponse deleteIndexResponse = client.indices().delete(request, RequestOptions.DEFAULT);
}
if (deleteIndexResponse.isAcknowledged()) {
log.info("Index is dropped");
} else {
throw new IndexException("Error while dropping index");
}
}
}

View File

@ -1,84 +0,0 @@
package com.iqser.red.service.search.v1.server.service;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
import com.iqser.red.service.search.v1.server.model.IndexSection;
import com.iqser.red.service.search.v1.server.model.SectionArea;
import com.iqser.red.service.search.v1.server.model.SectionText;
import com.iqser.red.service.search.v1.server.model.Text;
@Service
public class IndexDocumentConverterService {
public IndexDocument convert(String dossierTemplateId,
String dossierId,
String fileId,
String filename,
Text text,
String assignee,
boolean deleted,
boolean archived,
WorkflowStatus workflowStatus,
Map<String, String> fileAttributes) {
return IndexDocument.builder()
.dossierTemplateId(dossierTemplateId)
.dossierId(dossierId)
.fileId(fileId)
.filename(filename)
.sections(text.getSectionTexts().stream().map(this::convert).collect(Collectors.toList()))
.date(Date.from(OffsetDateTime.now().toInstant()))
.fileAttributes(convertFileAttributes(fileAttributes))
.assignee(assignee)
.dossierDeleted(deleted)
.dossierArchived(archived)
.workflowStatus(workflowStatus.name())
.build();
}
public IndexDocumentUpdate convertUpdateDocument(String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {
return IndexDocumentUpdate.builder()
.assignee(assignee)
.dossierDeleted(deleted)
.dossierArchived(archived)
.workflowStatus(workflowStatus)
.fileAttributes(convertFileAttributes(fileAttributes))
.build();
}
private List<IndexFileAttribute> convertFileAttributes(Map<String, String> fileAttributes) {
List<IndexFileAttribute> converted = new ArrayList<>();
if (fileAttributes != null && !fileAttributes.isEmpty()) {
fileAttributes.forEach((key, value) -> converted.add(new IndexFileAttribute(key, value)));
}
return converted;
}
private IndexSection convert(SectionText sectionText) {
return IndexSection.builder()
.sectionNumber(sectionText.getSectionNumber())
.text(sectionText.getText())
.pages(sectionText.getSectionAreas().stream().map(SectionArea::getPage).collect(Collectors.toSet()))
.headline(sectionText.getHeadline())
.build();
}
}

View File

@ -10,7 +10,7 @@ import org.apache.commons.codec.binary.StringUtils;
import org.springframework.core.io.ClassPathResource;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.index.IndexInformation;
import com.iqser.red.service.persistence.service.v1.api.model.index.IndexInformation;
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
import lombok.RequiredArgsConstructor;
@ -68,13 +68,13 @@ public class IndexInformationService {
byte[] buffer = new byte[8192];
int count;
MessageDigest digest = MessageDigest.getInstance("SHA-256");
try (BufferedInputStream bis = new BufferedInputStream(new ClassPathResource(PATH_TO_CONFIG).getInputStream())) {
while ((count = bis.read(buffer)) > 0) {
digest.update(buffer, 0, count);
}
return Arrays.toString(digest.digest());
BufferedInputStream bis = new BufferedInputStream(new ClassPathResource(PATH_TO_CONFIG).getInputStream());
while ((count = bis.read(buffer)) > 0) {
digest.update(buffer, 0, count);
}
bis.close();
return Arrays.toString(digest.digest());
}
}

View File

@ -1,17 +0,0 @@
package com.iqser.red.service.search.v1.server.service;
import lombok.AccessLevel;
import lombok.Builder;
import lombok.Data;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class IndexQueryResult {
boolean indexFound;
String numberOfShards;
String numberOfReplicas;
}

View File

@ -1,9 +0,0 @@
package com.iqser.red.service.search.v1.server.service;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
public interface IndexQueryService {
IndexQueryResult getIndexQueryResult(SearchConnection searchConnection);
}

View File

@ -1,23 +1,313 @@
package com.iqser.red.service.search.v1.server.service;
import static com.iqser.red.service.search.v1.server.exception.IndexException.FAILED_TO_SEARCH;
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.lucene.search.join.ScoreMode;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.client.RequestOptions;
import org.opensearch.common.text.Text;
import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.InnerHitBuilder;
import org.opensearch.index.query.NestedQueryBuilder;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchHits;
import org.opensearch.search.builder.SearchSourceBuilder;
import org.opensearch.search.fetch.subphase.FetchSourceContext;
import org.opensearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.model.MatchedDocument;
import com.iqser.red.service.search.v1.model.MatchedSection;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.Query;
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
public interface SearchService {
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.instrument.util.StringUtils;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
SearchResult search(String queryString,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
int page,
int pageSize,
boolean returnSections);
@Slf4j
@Service
@RequiredArgsConstructor
public class SearchService {
}
private final ElasticsearchClient client;
@Timed("redactmanager_search")
public SearchResult search(String queryString,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
int page,
int pageSize,
boolean returnSections) {
Query query = QueryStringConverter.convert(queryString);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(convertQuery(query,
dossierTemplateIds,
dossierIds,
fileId,
assignee,
includeDeletedDossiers,
includeArchivedDossiers,
workflowStatus,
fileAttributes,
returnSections))
.from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
.size(getPageSizeOrDefault(pageSize))
.fetchSource(new String[]{"dossierId", "dossierTemplateId", "dossierDeleted", "dossierArchived", "filename", "fileId", "assignee", "dossierStatus", "workflowStatus", "fileAttributes"},
new String[]{"sections"})
.highlighter(new HighlightBuilder().field("sections.text").field("filename").field("fileAttributes.value").highlighterType("fvh"))
.trackScores(true);
SearchRequest request = new SearchRequest(INDEX_NAME).source(searchSourceBuilder);
SearchResponse response = execute(request);
return convert(response, query);
}
protected SearchResponse execute(SearchRequest searchRequest) {
try {
return client.search(searchRequest, RequestOptions.DEFAULT);
} catch (IOException e) {
throw new IndexException(FAILED_TO_SEARCH, e);
}
}
private QueryBuilder convertQuery(Query query,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
boolean returnSections) {
BoolQueryBuilder entireQuery = QueryBuilders.boolQuery();
BoolQueryBuilder sectionsQueries = QueryBuilders.boolQuery();
for (String must : query.getMusts()) {
QueryBuilder textPhraseQuery = QueryBuilders.matchPhraseQuery("sections.text", must.toLowerCase(Locale.ROOT)).queryName(must);
QueryBuilder filenamePhraseQuery = QueryBuilders.matchPhrasePrefixQuery("filename", must.toLowerCase(Locale.ROOT)).queryName("filename." + must);
QueryBuilder fileAttributesPhraseQuery = QueryBuilders.matchPhraseQuery("fileAttributes.value", must.toLowerCase(Locale.ROOT)).queryName("fileAttributes." + must);
QueryBuilder filenameOrTextMustQuery = QueryBuilders.boolQuery().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery);
entireQuery.must(filenameOrTextMustQuery);
sectionsQueries.should(textPhraseQuery);
}
for (String should : query.getShoulds()) {
QueryBuilder textTermQuery = QueryBuilders.matchPhraseQuery("sections.text", should.toLowerCase(Locale.ROOT)).queryName(should);
QueryBuilder filenameTermQuery = QueryBuilders.matchPhrasePrefixQuery("filename", should.toLowerCase(Locale.ROOT)).queryName("filename." + should);
QueryBuilder fileAttributesPhraseQuery = QueryBuilders.matchPhraseQuery("fileAttributes.value", should.toLowerCase(Locale.ROOT)).queryName("fileAttributes." + should);
entireQuery.should(textTermQuery);
entireQuery.should(filenameTermQuery);
entireQuery.should(fileAttributesPhraseQuery);
sectionsQueries.should(textTermQuery);
}
if (returnSections) {
NestedQueryBuilder nestedQuery = QueryBuilders.nestedQuery("sections", sectionsQueries, ScoreMode.Avg)
.innerHit(new InnerHitBuilder().setSize(100)
.setFetchSourceContext(new FetchSourceContext(true,
new String[]{"sections.headline", "sections.sectionNumber", "sections.pages"},
new String[]{"sections.text"})));
entireQuery.should(nestedQuery);
}
BoolQueryBuilder filterQuery = QueryBuilders.boolQuery();
if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {
var dossierTemplateIdQueryBuilder = QueryBuilders.boolQuery();
for (var dossierTemplateId : dossierTemplateIds) {
if (StringUtils.isNotEmpty(dossierTemplateId)) {
dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.matchQuery("dossierTemplateId", dossierTemplateId));
}
}
filterQuery.must(dossierTemplateIdQueryBuilder);
}
if (dossierIds != null && !dossierIds.isEmpty()) {
var dossierIdQueryBuilder = QueryBuilders.boolQuery();
for (var dossierId : dossierIds) {
if (StringUtils.isNotEmpty(dossierId)) {
dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.matchQuery("dossierId", dossierId));
}
}
filterQuery.must(dossierIdQueryBuilder);
}
if (StringUtils.isNotEmpty(fileId)) {
filterQuery.must(QueryBuilders.matchQuery("fileId", fileId));
}
if (StringUtils.isNotEmpty(assignee)) {
filterQuery.must(QueryBuilders.matchQuery("assignee", assignee));
}
if (includeArchivedDossiers) {
filterQuery.must(QueryBuilders.termsQuery("dossierArchived", true, false));
} else {
filterQuery.must(QueryBuilders.termsQuery("dossierArchived", false));
}
if (includeDeletedDossiers) {
filterQuery.must(QueryBuilders.termsQuery("dossierDeleted", true, false));
} else {
filterQuery.must(QueryBuilders.termsQuery("dossierDeleted", false));
}
if (StringUtils.isNotEmpty(workflowStatus)) {
filterQuery.must(QueryBuilders.matchQuery("workflowStatus", workflowStatus));
}
if (fileAttributes != null && !fileAttributes.isEmpty()) {
var fileAttributesQueryBuilder = QueryBuilders.boolQuery();
for (var fileAttributeKey : fileAttributes.keySet()) {
if (StringUtils.isNotEmpty(fileAttributeKey)) {
fileAttributesQueryBuilder.filter(QueryBuilders.boolQuery()
.must(QueryBuilders.matchQuery("fileAttributes.name", fileAttributeKey))
.must(QueryBuilders.matchQuery("fileAttributes.value", fileAttributes.get(fileAttributeKey))));
}
}
filterQuery.must(fileAttributesQueryBuilder);
}
return QueryBuilders.boolQuery().filter(filterQuery).must(entireQuery);
}
private SearchResult convert(SearchResponse response, Query query) {
return SearchResult.builder()
.matchedDocuments(Arrays.stream(response.getHits().getHits()).map(hit -> convertSearchHit(hit, query)).collect(Collectors.toList()))
.maxScore(response.getHits().getMaxScore())
.total(response.getHits().getTotalHits().value)
.build();
}
private MatchedDocument convertSearchHit(SearchHit hit, Query query) {
Set<String> matchesTerms = Arrays.stream(hit.getMatchedQueries())
.map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
.map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
.collect(Collectors.toSet());
Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());
MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
.score(hit.getScore())
.dossierId((String) hit.getSourceAsMap().get("dossierId"))
.dossierTemplateId((String) hit.getSourceAsMap().get("dossierTemplateId"))
.fileId((String) hit.getSourceAsMap().get("fileId"))
.dossierStatus((String) hit.getSourceAsMap().get("dossierStatus"))
.assignee((String) hit.getSourceAsMap().get("assignee"))
.fileAttributes(convertFileAttributes(hit.getSourceAsMap().get("fileAttributes")))
.workflowStatus((String) hit.getSourceAsMap().get("workflowStatus"))
.fileName((String) hit.getSourceAsMap().get("fileName"))
.dossierDeleted((Boolean) hit.getSourceAsMap().get("dossierDeleted"))
.dossierArchived((Boolean) hit.getSourceAsMap().get("dossierArchived"))
.highlights(hit.getHighlightFields()
.entrySet()
.stream()
.collect(Collectors.toMap(Map.Entry::getKey, e -> Arrays.stream(e.getValue().getFragments()).map(Text::string).collect(Collectors.toSet()))))
.matchedTerms(matchesTerms)
.unmatchedTerms(unmatchedTerms);
if (hit.getInnerHits() != null) {
SearchHits sectionHits = hit.getInnerHits().get("sections");
matchedDocumentBuilder.matchedSections(Arrays.stream(sectionHits.getHits()).map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
.containsAllMatchedSections(sectionHits.getTotalHits().value == sectionHits.getHits().length);
}
return matchedDocumentBuilder.build();
}
private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {
Map<String, String> fileAttributes = new HashMap<>();
if (fileAttributesSourceMap != null) {
List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
}
return fileAttributes;
}
private MatchedSection convertInnerHit(SearchHit hit) {
return MatchedSection.builder()
.headline((String) hit.getSourceAsMap().get("headline"))
.sectionNumber((Integer) hit.getSourceAsMap().get("sectionNumber"))
.pages(new HashSet<>((ArrayList<Integer>) hit.getSourceAsMap().get("pages")))
.matchedTerms(Arrays.stream(hit.getMatchedQueries()).collect(Collectors.toSet()))
.build();
}
private int getPageSizeOrDefault(int pageSize) {
if (pageSize <= 0) {
return 10;
}
return pageSize;
}
private int getPageOrDefault(int page) {
if (page < 0) {
return 0;
}
return page;
}
}
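
A minimal caller-side sketch of this service; the argument values are illustrative, and the getter names on SearchResult are assumed from its Lombok builder:

import java.util.List;
import java.util.Map;

import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.service.SearchService;

public class SearchUsageSketch {

    // In the application, SearchService is a Spring bean and would be injected.
    static SearchResult firstPage(SearchService searchService) {
        return searchService.search(
                "contract termination",  // free-text query, parsed by QueryStringConverter
                List.of("template-1"),   // restrict to one dossier template
                null, null, null,        // any dossier, file id, assignee
                false, false,            // exclude deleted and archived dossiers
                null, Map.of(),          // any workflow status, no file-attribute filters
                0, 20,                   // first page, 20 hits per page
                true);                   // return matched sections as inner hits
    }
}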

View File

@ -2,11 +2,10 @@ package com.iqser.red.service.search.v1.server.service;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
import com.iqser.red.storage.commons.service.StorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
@ -24,7 +23,7 @@ public class TextStorageService {
public Text getText(String dossierId, String fileId) {
try {
return storageService.readJSONObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.SIMPLIFIED_TEXT), Text.class);
return storageService.readJSONObject(StorageIdUtils.getStorageId(dossierId, fileId, FileType.SIMPLIFIED_TEXT), Text.class);
} catch (StorageObjectDoesNotExist e) {
throw new RuntimeException("Text is not available", e);
} catch (Exception e) {

View File

@ -1,43 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.io.IOException;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch._types.Refresh;
import co.elastic.clients.elasticsearch.core.DeleteRequest;
import lombok.RequiredArgsConstructor;
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class DocumentDeleteServiceImpl implements DocumentDeleteService {
private final EsClientCache clientCache;
private final ElasticsearchSettings settings;
public void deleteDocument(String fileId) {
DeleteRequest request = new DeleteRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.build();
try {
clientCache.getClient().delete(request);
} catch (IOException | ElasticsearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentDeleteError(fileId, e);
}
}
}

View File

@ -1,45 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.io.IOException;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch._types.Refresh;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class DocumentIndexServiceImpl implements DocumentIndexService {
private final EsClientCache clientCache;
private final ElasticsearchSettings settings;
@Timed("redactmanager_indexDocument")
public void indexDocument(IndexDocument indexDocument) {
try {
clientCache.getClient()
.index(i -> i.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(indexDocument.getFileId())
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.document(indexDocument));
} catch (IOException | ElasticsearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentIndexError(indexDocument.getFileId(), e);
}
}
}

View File

@ -1,53 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.io.IOException;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch._types.Refresh;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class DocumentUpdateServiceImpl implements DocumentUpdateService {
private final EsClientCache clientCache;
private final ElasticsearchSettings settings;
@SneakyThrows
@Timed("redactmanager_updateDocument")
public void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate) {
try {
clientCache.getClient()
.update(u -> u.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.doc(indexDocumentUpdate)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())), IndexDocumentUpdate.class);
} catch (IOException | ElasticsearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentUpdateError(fileId, e);
}
}
@SneakyThrows
@Timed("redactmanager_documentExists")
public boolean documentExists(String fileId) {
return clientCache.getClient().exists(e -> e.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix())).id(fileId)).value();
}
}

View File

@ -1,66 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.util.stream.Collectors;
import lombok.SneakyThrows;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.json.jackson.JacksonJsonpMapper;
import co.elastic.clients.transport.ElasticsearchTransport;
import co.elastic.clients.transport.rest_client.RestClientTransport;
import lombok.Data;
import lombok.experimental.Delegate;
@Data
@SuppressWarnings("PMD")
public class EsClient {
// Lower timeouts should be set per request.
private static final int ABSURD_HIGH_TIMEOUT = 600_000;
private SearchConnection searchConnection;
@Delegate
private ElasticsearchClient elasticsearchClient;
public EsClient(SearchConnection searchConnection) {
HttpHost[] httpHost = searchConnection.getHosts()
.stream()
.map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
.toList()
.toArray(new HttpHost[searchConnection.getHosts().size()]);
var builder = RestClient.builder(httpHost)
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT)
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));
if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(searchConnection.getUsername(), searchConnection.getPassword()));
builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
}
ElasticsearchTransport transport = new RestClientTransport(builder.build(), new JacksonJsonpMapper());
this.searchConnection = searchConnection;
this.elasticsearchClient = new ElasticsearchClient(transport);
}
@SneakyThrows
public void terminate() {
elasticsearchClient._transport().close();
}
}

View File

@ -1,102 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.knecon.fforesight.tenantcommons.EncryptionDecryptionService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class EsClientCache {
private final TenantsClient tenantsClient;
private final EncryptionDecryptionService encryptionDecryptionService;
private final IndexCreatorServiceImpl indexCreatorService;
@Value("${multitenancy.client-cache.maximumSize:100}")
private Long maximumSize;
@Value("${multitenancy.client-cache.expireAfterAccess:10}")
private Integer expireAfterAccess;
private LoadingCache<String, EsClient> clients;
@SneakyThrows
public void isClientAliveOrTerminate() {
try {
var client = clients.get(TenantContext.getTenantId());
try {
log.info("Checking if client is still alive: {}", client.info());
} catch (Exception e) {
try {
client.terminate();
} catch (Exception e2) {
log.info("Failed to terminate ES Client");
clients.invalidate(TenantContext.getTenantId());
}
}
} catch (Exception e) {
log.error("Failed to terminate/invalidate client", e);
}
}
@PostConstruct
protected void createCache() {
clients = CacheBuilder.newBuilder()
.maximumSize(maximumSize)
.expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
.removalListener((RemovalListener<String, EsClient>) removal -> {
try {
removal.getValue().terminate();
log.info("Closed elasticsearch client for tenant {}", removal.getKey());
} catch (Exception e) {
log.info("Failed to close elasticsearch client for tenant {}", removal.getKey());
}
})
.build(new CacheLoader<>() {
public EsClient load(String tenantId) {
var tenant = tenantsClient.getTenant(tenantId);
if (tenant.getSearchConnection().getPassword() != null) {
tenant.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(tenant.getSearchConnection().getPassword()));
}
var client = new EsClient(tenant.getSearchConnection());
log.info("Initialized elasticsearch client for tenant {}", tenantId);
indexCreatorService.createIndex(client);
return client;
}
});
}
@SneakyThrows
public EsClient getClient() {
return clients.get(TenantContext.getTenantId());
}
}
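
The eviction wiring above is standard Guava; a compact, self-contained illustration of the same LoadingCache pattern with plain String values (purely illustrative, not repository code):

import java.util.concurrent.TimeUnit;

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;

public class TenantCacheSketch {

    public static void main(String[] args) throws Exception {
        LoadingCache<String, String> clients = CacheBuilder.newBuilder()
                .maximumSize(100)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                // Fires on eviction; the real cache closes the Elasticsearch transport here.
                .removalListener((RemovalListener<String, String>) removal ->
                        System.out.println("evicted client for tenant " + removal.getKey()))
                .build(new CacheLoader<>() {
                    @Override
                    public String load(String tenantId) {
                        return "client-for-" + tenantId; // the real loader builds an EsClient per tenant
                    }
                });
        System.out.println(clients.get("tenant-a")); // loads on first access, cached afterwards
    }
}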

View File

@ -1,84 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.core.io.ResourceLoader;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import co.elastic.clients.elasticsearch._types.mapping.TypeMapping;
import co.elastic.clients.elasticsearch.indices.IndexSettings;
import co.elastic.clients.elasticsearch.indices.MappingLimitSettingsNestedObjects;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
@SuppressWarnings("PMD")
public class IndexCreatorServiceImpl {
private final ElasticsearchSettings settings;
public void createIndex(EsClient esClient) {
if (!indexExists(esClient)) {
try {
var response = esClient.indices()
.create(i -> i.index(IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix()))
.settings(createIndexSettings(esClient))
.mappings(createIndexMapping()));
log.info("Successfully created index: {}", response.index());
} catch (IOException e) {
log.error("Failed to create index.", e);
}
}
}
private boolean indexExists(EsClient esClient) {
try {
var response = esClient.indices().exists(i -> i.index(IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix())));
return response.value();
} catch (IOException e) {
throw IndexException.indexExists(e);
}
}
@SneakyThrows
private TypeMapping createIndexMapping() {
URL resource = ResourceLoader.class.getClassLoader().getResource("index/mapping.json");
try (InputStream is = resource.openStream()) {
return new TypeMapping.Builder().withJson(is).build();
}
}
@SneakyThrows
private IndexSettings createIndexSettings(EsClient esClient) {
URL resource = ResourceLoader.class.getClassLoader().getResource("index/settings.json");
try (InputStream is = resource.openStream()) {
return new IndexSettings.Builder().withJson(is)
.numberOfShards(esClient.getSearchConnection().getNumberOfShards())
.numberOfReplicas(esClient.getSearchConnection().getNumberOfReplicas())
.mapping(m -> m.nestedObjects(MappingLimitSettingsNestedObjects.of(a -> a.limit(settings.getNumberOfNestedObjectLimit()))))
.build();
}
}
}

View File

@ -1,94 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class IndexDeleteServiceImpl implements IndexDeleteService {
private final EsClientCache clientCache;
private final IndexCreatorServiceImpl indexCreatorService;
public void recreateIndex() {
closeIndex();
dropIndex();
indexCreatorService.createIndex(clientCache.getClient());
}
@Override
public void recreateIndex(SearchConnection searchConnection) {
var client = new EsClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
indexCreatorService.createIndex(client);
}
@SneakyThrows
public void closeIndex() {
closeIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}
@SneakyThrows
public void dropIndex() {
dropIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}
public void dropIndex(SearchConnection searchConnection) {
var client = new EsClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
}
@SneakyThrows
private void closeIndex(EsClient client, String indexPrefix) {
var closeIndexResponse = client.indices()
.close(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
if (closeIndexResponse.acknowledged()) {
log.info("Index is closed");
} else {
throw new IndexException("Error while closing index");
}
}
@SneakyThrows
private void dropIndex(EsClient client, String indexPrefix) {
log.info("Will drop index");
var deleteIndexResponse = client.indices()
.delete(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
if (deleteIndexResponse.acknowledged()) {
log.info("Index is dropped");
} else {
throw new IndexException("Error while dropping index");
}
}
}

View File

@ -1,70 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.util.Optional;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch.indices.GetIndicesSettingsResponse;
import co.elastic.clients.elasticsearch.indices.IndexState;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
@SuppressWarnings("PMD")
public class IndexQueryServiceImpl implements IndexQueryService {
@SneakyThrows
public IndexQueryResult getIndexQueryResult(SearchConnection searchConnection) {
IndexQueryResult.IndexQueryResultBuilder builder = IndexQueryResult.builder();
getIndexState(searchConnection).ifPresent(indexState -> {
builder.indexFound(true);
var indexSettings = indexState.settings();
if (indexSettings != null) {
String replicas = indexSettings.numberOfReplicas();
String shards = indexSettings.numberOfShards();
if (indexSettings.index() != null) {
if (replicas == null) {
replicas = indexSettings.index().numberOfReplicas();
}
if (shards == null) {
shards = indexSettings.index().numberOfShards();
}
}
builder.numberOfReplicas(replicas).numberOfShards(shards);
}
});
return builder.build();
}
@SneakyThrows
private Optional<IndexState> getIndexState(SearchConnection searchConnection) {
var esClient = new EsClient(searchConnection);
var indexName = IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix());
try {
GetIndicesSettingsResponse settings = esClient.indices().getSettings(i -> i.index(indexName));
return Optional.ofNullable(settings.get(indexName));
} catch (ElasticsearchException elasticsearchException) {
return Optional.empty();
}
}
}

View File

@ -1,337 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.model.MatchedDocument;
import com.iqser.red.service.search.v1.model.MatchedSection;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.Query;
import com.iqser.red.service.search.v1.server.service.SearchService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
import co.elastic.clients.elasticsearch._types.FieldValue;
import co.elastic.clients.elasticsearch._types.query_dsl.ChildScoreMode;
import co.elastic.clients.elasticsearch._types.query_dsl.QueryBuilders;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import co.elastic.clients.elasticsearch.core.search.HighlightField;
import co.elastic.clients.elasticsearch.core.search.HighlighterType;
import co.elastic.clients.elasticsearch.core.search.Hit;
import co.elastic.clients.elasticsearch.core.search.InnerHitsResult;
import co.elastic.clients.json.JsonData;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.instrument.util.StringUtils;
import jakarta.json.JsonObject;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class SearchServiceImpl implements SearchService {
private final EsClientCache clientCache;
@Timed("redactmanager_search")
public SearchResult search(String queryString,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
int page,
int pageSize,
boolean returnSections) {
Query query = QueryStringConverter.convert(queryString);
Map<String, HighlightField> highlightFieldMap = new HashMap<>();
highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
highlightFieldMap.put("filename", new HighlightField.Builder().build());
highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());
SearchRequest request = new SearchRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.query(convertQuery(query,
dossierTemplateIds,
dossierIds,
fileId,
assignee,
includeDeletedDossiers,
includeArchivedDossiers,
workflowStatus,
fileAttributes,
returnSections))
.from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
.size(getPageSizeOrDefault(pageSize))
.source(s -> s.filter(f -> f.includes("dossierId",
"dossierTemplateId",
"dossierDeleted",
"dossierArchived",
"filename",
"fileId",
"assignee",
"dossierStatus",
"workflowStatus",
"fileAttributes")))
.highlight(h -> h.type(HighlighterType.FastVector).fields(highlightFieldMap))
.trackScores(true)
.build();
SearchResponse response = execute(request);
return convert(response, query);
}
protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {
try {
return clientCache.getClient().search(searchRequest, IndexDocument.class);
} catch (IOException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.searchFailed(e);
}
}
private co.elastic.clients.elasticsearch._types.query_dsl.Query convertQuery(Query query,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
boolean returnSections) {
var entireQuery = QueryBuilders.bool();
var sectionsQueries = QueryBuilders.bool();
for (String must : query.getMusts()) {
var textPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must));
var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must));
var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
.query(must.toLowerCase(Locale.ROOT))
.queryName("fileAttributes." + must));
var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
entireQuery.must(filenameOrTextMustQuery);
sectionsQueries.should(textPhraseQuery);
}
for (String should : query.getShoulds()) {
var textTermQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should));
var filenameTermQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should));
var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
.query(should.toLowerCase(Locale.ROOT))
.queryName("fileAttributes." + should));
entireQuery.should(textTermQuery);
entireQuery.should(filenameTermQuery);
entireQuery.should(fileAttributesPhraseQuery);
sectionsQueries.should(textTermQuery);
}
if (returnSections) {
var nestedQuery = QueryBuilders.nested(n -> n.scoreMode(ChildScoreMode.Avg)
.queryName("sections")
.query(sectionsQueries.build()._toQuery())
.path("sections")
.innerHits(i -> i.size(100)));
entireQuery.should(nestedQuery);
}
var filterQuery = QueryBuilders.bool();
if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {
var dossierTemplateIdQueryBuilder = QueryBuilders.bool();
for (var dossierTemplateId : dossierTemplateIds) {
if (StringUtils.isNotEmpty(dossierTemplateId)) {
dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierTemplateId").query(dossierTemplateId)));
}
}
filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
}
if (dossierIds != null && !dossierIds.isEmpty()) {
var dossierIdQueryBuilder = QueryBuilders.bool();
for (var dossierId : dossierIds) {
if (StringUtils.isNotEmpty(dossierId)) {
dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierId").query(dossierId)));
}
}
filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
}
if (StringUtils.isNotEmpty(fileId)) {
filterQuery.must(QueryBuilders.match(q -> q.field("fileId").query(fileId)));
}
if (StringUtils.isNotEmpty(assignee)) {
filterQuery.must(QueryBuilders.match(q -> q.field("assignee").query(assignee)));
}
if (includeArchivedDossiers) {
filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived")
.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
} else {
filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
}
if (includeDeletedDossiers) {
filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted")
.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
} else {
filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
}
if (StringUtils.isNotEmpty(workflowStatus)) {
filterQuery.must(QueryBuilders.match(q -> q.field("workflowStatus").query(workflowStatus)));
}
if (fileAttributes != null && !fileAttributes.isEmpty()) {
var fileAttributesQueryBuilder = QueryBuilders.bool();
for (var fileAttributeKey : fileAttributes.keySet()) {
if (StringUtils.isNotEmpty(fileAttributeKey)) {
fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
.must(QueryBuilders.match(q -> q.field("fileAttributes.name").query(fileAttributeKey)))
.must(QueryBuilders.match(q -> q.field("fileAttributes.value").query(fileAttributes.get(fileAttributeKey))))
.build()
._toQuery()));
}
}
filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
}
return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
}
private SearchResult convert(SearchResponse response, Query query) {
List<Hit> hits = response.hits().hits();
return SearchResult.builder()
.matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
.maxScore(response.hits().maxScore().floatValue())
.total(response.hits().total().value())
.build();
}
private MatchedDocument convertSearchHit(Hit hit, Query query) {
List<String> matchedQueries = hit.matchedQueries();
Set<String> matchesTerms = matchedQueries.stream()
.map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
.map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
.collect(Collectors.toSet());
Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());
IndexDocument indexDocument = (IndexDocument) hit.source();
MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
.score(hit.score().floatValue())
.dossierId(indexDocument.getDossierId())
.dossierTemplateId(indexDocument.getDossierTemplateId())
.fileId(indexDocument.getFileId())
.assignee(indexDocument.getAssignee())
.fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
.workflowStatus(indexDocument.getWorkflowStatus())
.fileName(indexDocument.getFilename())
.dossierDeleted(indexDocument.isDossierDeleted())
.dossierArchived(indexDocument.isDossierArchived())
.highlights(hit.highlight())
.matchedTerms(matchesTerms)
.unmatchedTerms(unmatchedTerms);
if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
.containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
}
return matchedDocumentBuilder.build();
}
private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {
Map<String, String> fileAttributes = new HashMap<>();
if (fileAttributesSourceMap != null) {
List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
}
return fileAttributes;
}
private MatchedSection convertInnerHit(Hit<JsonData> hit) {
JsonObject indexSection = hit.source().toJson().asJsonObject();
var jsonArray = indexSection.getJsonArray("pages");
var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());
return MatchedSection.builder()
.headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
.sectionNumber(indexSection.getString("sectionNumber"))
.pages(pages)
.matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
.build();
}
private int getPageSizeOrDefault(int pageSize) {
if (pageSize <= 0) {
return 10;
}
return pageSize;
}
private int getPageOrDefault(int page) {
if (page < 0) {
return 0;
}
return page;
}
}
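
A minimal call sketch showing how the filter parameters combine; the values mirror the integration tests further down, and every non-empty filter (assignee, workflowStatus, fileAttributes) must match in addition to the query terms:

// Sketch only; searchService is the service above, with the signature exercised by the tests below.
SearchResult result = searchService.search("S-Metolachlor", null, null, null, "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"), 0, 10, false);
// Only documents matching the query terms AND the assignee AND the workflow status AND the file attribute are returned.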

View File

@ -1,44 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.opensearch.client.opensearch.core.DeleteRequest;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import lombok.RequiredArgsConstructor;
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentDeleteServiceImpl implements DocumentDeleteService {
private final OpensearchClientCache clientCache;
private final ElasticsearchSettings settings;
public void deleteDocument(String fileId) {
DeleteRequest request = new DeleteRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.build();
try {
clientCache.getClient().delete(request);
} catch (IOException | OpenSearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentDeleteError(fileId, e);
}
}
}
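
A one-line usage sketch; the document id equals the fileId, and the delete runs against the tenant's "<indexPrefix>_search" index with the configured refresh policy:

documentDeleteService.deleteDocument("fileId1"); // removes the document indexed under _id "fileId1"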

View File

@ -1,44 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentIndexServiceImpl implements DocumentIndexService {
private final OpensearchClientCache clientCache;
private final ElasticsearchSettings settings;
@Timed("redactmanager_indexDocument")
public void indexDocument(IndexDocument indexDocument) {
try {
clientCache.getClient().index(i -> i.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(indexDocument.getFileId())
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.document(indexDocument));
} catch (IOException | OpenSearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentIndexError(indexDocument.getFileId(), e);
}
}
}
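
Indexing keys the document by its fileId, so re-indexing the same file overwrites the previous version. A sketch, assuming (hypothetically) that IndexDocument exposes a Lombok builder like the other model classes in this module:

IndexDocument doc = IndexDocument.builder() // hypothetical builder, not shown in this diff
.fileId("fileId1")
.dossierId("dossierId1")
.filename("report.pdf")
.build();
documentIndexService.indexDocument(doc); // upserts under _id "fileId1" with the configured refresh policy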

View File

@ -1,53 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentUpdateServiceImpl implements DocumentUpdateService {
private final OpensearchClientCache clientCache;
private final ElasticsearchSettings settings;
@SneakyThrows
@Timed("redactmanager_updateDocument")
public void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate) {
try {
clientCache.getClient()
.update(u -> u.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.doc(indexDocumentUpdate)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())), IndexDocumentUpdate.class);
} catch (IOException | OpenSearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentUpdateError(fileId, e);
}
}
@SneakyThrows
@Timed("redactmanager_documentExists")
public boolean documentExists(String fileId) {
return clientCache.getClient().exists(e -> e.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix())).id(fileId)).value();
}
}
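
A minimal sketch of the existence check, which wraps the client's exists call for the tenant index; updateDocument performs a partial update that only touches the fields carried by the IndexDocumentUpdate:

boolean indexed = documentUpdateService.documentExists("fileId1"); // true once the file has been indexed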

View File

@ -1,101 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import org.opensearch.client.json.JsonpMapper;
import org.opensearch.client.opensearch._types.mapping.TypeMapping;
import org.opensearch.client.opensearch.indices.IndexSettings;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.core.io.ResourceLoader;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import jakarta.json.stream.JsonParser;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
@SuppressWarnings("PMD")
public class IndexCreatorServiceImpl {
private final ElasticsearchSettings settings;
public void createIndex(OpensearchClient client) {
if (!indexExists(client)) {
try {
var response = client.indices()
.create(i -> i.index(IndexNameHelper.getSearchIndex(client.getSearchConnection().getIndexPrefix()))
.settings(createIndexSettings(client))
.mappings(createIndexMapping(client)));
log.info("Successfully created index: {}", response.index());
} catch (IOException e) {
log.error("Failed to create index.", e);
}
}
}
private boolean indexExists(OpensearchClient client) {
try {
var response = client.indices().exists(i -> i.index(IndexNameHelper.getSearchIndex(client.getSearchConnection().getIndexPrefix())));
return response.value();
} catch (IOException e) {
throw IndexException.indexExists(e);
}
}
@SneakyThrows
private TypeMapping createIndexMapping(OpensearchClient client) {
URL resource = ResourceLoader.class.getClassLoader().getResource("index/mapping.json");
try (InputStream is = resource.openStream()) {
JsonpMapper mapper = client._transport().jsonpMapper();
JsonParser parser = mapper.jsonProvider().createParser(is);
return TypeMapping._DESERIALIZER.deserialize(parser, mapper);
}
}
@SneakyThrows
private IndexSettings createIndexSettings(OpensearchClient client) {
URL resource = ResourceLoader.class.getClassLoader().getResource("index/settings.json");
try (InputStream is = resource.openStream()) {
JsonpMapper mapper = client._transport().jsonpMapper();
JsonParser parser = mapper.jsonProvider().createParser(is);
var indexSettingsFromJson = IndexSettings._DESERIALIZER.deserialize(parser, mapper);
// It is not possible to set "index.mapping.nested_objects.limit"; OpenSearch does not seem to expose this parameter.
// Hopefully OpenSearch has no such limit at all; we were unable to find any documentation on it.
// Elasticsearch does enforce this limit, and since we cannot raise it here, that is currently the only reason to keep both client implementations.
var indexSettings = new IndexSettings.Builder().index(indexSettingsFromJson.index())
.numberOfReplicas(client.getSearchConnection().getNumberOfReplicas())
.numberOfShards(client.getSearchConnection().getNumberOfShards())
.analysis(indexSettingsFromJson.analysis())
.build();
return indexSettings;
}
}
}
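
createIndex is idempotent: it first checks indexExists and only then creates the index from the bundled index/mapping.json and index/settings.json. A minimal sketch, mirroring the call sites below:

indexCreatorService.createIndex(clientCache.getClient()); // no-op if "<indexPrefix>_search" already exists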

View File

@ -1,92 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class IndexDeleteServiceImpl implements IndexDeleteService {
private final OpensearchClientCache clientCache;
private final IndexCreatorServiceImpl indexCreatorService;
public void recreateIndex() {
closeIndex();
dropIndex();
indexCreatorService.createIndex(clientCache.getClient());
}
@Override
public void recreateIndex(SearchConnection searchConnection) {
var client = new OpensearchClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
indexCreatorService.createIndex(client);
}
@SneakyThrows
public void closeIndex() {
closeIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}
@SneakyThrows
public void dropIndex() {
dropIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}
public void dropIndex(SearchConnection searchConnection) {
var client = new OpensearchClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
}
@SneakyThrows
private void closeIndex(OpensearchClient opensearchClient, String indexPrefix) {
var closeIndexResponse = opensearchClient.indices()
.close(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
if (closeIndexResponse.acknowledged()) {
log.info("Index is closed");
} else {
throw new IndexException("Error while closing index");
}
}
@SneakyThrows
private void dropIndex(OpensearchClient opensearchClient, String indexPrefix) {
log.info("Will drop index");
var deleteIndexResponse = opensearchClient.indices().delete(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
if (deleteIndexResponse.acknowledged()) {
log.info("Index is dropped");
} else {
throw new IndexException("Error while dropping index");
}
}
}
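
A usage sketch; recreation is destructive and runs close, drop and create in sequence:

indexDeleteService.recreateIndex(); // closes, drops and recreates the current tenant's index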

View File

@ -1,56 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.util.Optional;
import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch.indices.GetIndicesSettingsResponse;
import org.opensearch.client.opensearch.indices.IndexState;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
@SuppressWarnings("PMD")
public class IndexQueryServiceImpl implements IndexQueryService {
@SneakyThrows
public IndexQueryResult getIndexQueryResult(SearchConnection searchConnection) {
IndexQueryResult.IndexQueryResultBuilder builder = IndexQueryResult.builder();
Optional<IndexState> optionalIndexState = getIndexState(searchConnection);
if (optionalIndexState.isPresent()) {
builder.indexFound(true);
var indexSettings = optionalIndexState.get().settings();
if (indexSettings != null) {
builder.numberOfReplicas(indexSettings.numberOfReplicas()).numberOfShards(indexSettings.numberOfShards());
}
}
return builder.build();
}
@SneakyThrows
private Optional<IndexState> getIndexState(SearchConnection searchConnection) {
var opensearchClient = new OpensearchClient(searchConnection);
var indexName = IndexNameHelper.getSearchIndex(opensearchClient.getSearchConnection().getIndexPrefix());
try {
GetIndicesSettingsResponse settings = opensearchClient.indices().getSettings(i -> i.index(indexName));
return Optional.ofNullable(settings.get(indexName));
} catch (OpenSearchException openSearchException) {
return Optional.empty();
}
}
}
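
A usage sketch, assuming a populated SearchConnection; a missing index yields indexFound=false rather than an exception:

IndexQueryResult info = indexQueryService.getIndexQueryResult(searchConnection);
// carries indexFound plus the replica and shard counts when the index exists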

View File

@ -1,62 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.Data;
import lombok.SneakyThrows;
import lombok.experimental.Delegate;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.opensearch.client.RestClient;
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.transport.rest_client.RestClientTransport;
@Data
@SuppressWarnings("PMD")
public class OpensearchClient {
// Lower timeouts should be set per request.
private static final int ABSURD_HIGH_TIMEOUT = 600_000;
private SearchConnection searchConnection;
@Delegate
private OpenSearchClient client;
public OpensearchClient(SearchConnection searchConnection) {
HttpHost[] httpHost = searchConnection.getHosts()
.stream()
.map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
.toList()
.toArray(new HttpHost[searchConnection.getHosts().size()]);
var builder = RestClient.builder(httpHost)
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(0)
.setConnectionRequestTimeout(ABSURD_HIGH_TIMEOUT)
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));
if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(searchConnection.getUsername(), searchConnection.getPassword()));
builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
}
var transport = new RestClientTransport(builder.build(), new JacksonJsonpMapper());
this.searchConnection = searchConnection;
this.client = new OpenSearchClient(transport);
}
@SneakyThrows
public void terminate() {
client._transport().close();
}
}
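
A construction sketch, assuming the SearchConnection builder used in the integration tests below; all OpenSearchClient methods are available on the wrapper through the Lombok @Delegate:

var connection = SearchConnection.builder()
.hosts(Set.of("localhost"))
.port(9200)
.scheme("http")
.indexPrefix("tenant1")
.build();
var client = new OpensearchClient(connection); // builds the REST transport, with basic auth only if a username is set
// ... use the delegated API, e.g. client.indices(), client.search(...)
client.terminate(); // closes the transport when done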

View File

@ -1,101 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.knecon.fforesight.tenantcommons.EncryptionDecryptionService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class OpensearchClientCache {
private final TenantsClient tenantsClient;
private final EncryptionDecryptionService encryptionDecryptionService;
private final IndexCreatorServiceImpl indexCreatorService;
@Value("${multitenancy.client-cache.maximumSize:100}")
private Long maximumSize;
@Value("${multitenancy.client-cache.expireAfterAccess:10}")
private Integer expireAfterAccess;
private LoadingCache<String, OpensearchClient> clients;
@SneakyThrows
public void isClientAliveOrTerminate() {
try {
var client = clients.get(TenantContext.getTenantId());
try {
log.info("Checking if client is still alive: {}", client.info());
} catch (Exception e) {
try {
client.terminate();
} catch (Exception e2) {
log.info("Failed to terminate ES Client");
clients.invalidate(TenantContext.getTenantId());
}
}
} catch (Exception e) {
log.error("Failed to terminate/invalidate client", e);
}
}
@PostConstruct
protected void createCache() {
clients = CacheBuilder.newBuilder()
.maximumSize(maximumSize)
.expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
.removalListener((RemovalListener<String, OpensearchClient>) removal -> {
try {
removal.getValue().terminate();
log.info("Closed elasticsearch client for tenant {}", removal.getKey());
} catch (Exception e) {
log.info("Failed to close elasticsearch client for tenant {}", removal.getKey());
}
})
.build(new CacheLoader<>() {
public OpensearchClient load(String tenantId) {
var tenant = tenantsClient.getTenant(tenantId);
if (tenant.getSearchConnection().getPassword() != null) {
tenant.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(tenant.getSearchConnection().getPassword()));
}
var client = new OpensearchClient(tenant.getSearchConnection());
log.info("Initialized elasticsearch client for tenant {}", tenantId);
indexCreatorService.createIndex(client);
return client;
}
});
}
@SneakyThrows
public OpensearchClient getClient() {
return clients.get(TenantContext.getTenantId());
}
}
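
Callers never construct clients directly; they resolve the current tenant's cached client, which is created on first access (including index creation) and closed on eviction after the configured idle time. A minimal sketch:

TenantContext.setTenantId("tenant1");
OpensearchClient client = clientCache.getClient(); // loaded via TenantsClient on first access, then cached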

View File

@ -1,357 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.opensearch.client.json.JsonData;
import org.opensearch.client.opensearch._types.FieldValue;
import org.opensearch.client.opensearch._types.query_dsl.ChildScoreMode;
import org.opensearch.client.opensearch._types.query_dsl.QueryBuilders;
import org.opensearch.client.opensearch.core.SearchRequest;
import org.opensearch.client.opensearch.core.SearchResponse;
import org.opensearch.client.opensearch.core.search.BuiltinHighlighterType;
import org.opensearch.client.opensearch.core.search.HighlightField;
import org.opensearch.client.opensearch.core.search.HighlighterType;
import org.opensearch.client.opensearch.core.search.Hit;
import org.opensearch.client.opensearch.core.search.InnerHitsResult;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.model.MatchedDocument;
import com.iqser.red.service.search.v1.model.MatchedSection;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.Query;
import com.iqser.red.service.search.v1.server.service.SearchService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.instrument.util.StringUtils;
import jakarta.json.JsonObject;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class SearchServiceImpl implements SearchService {
private final OpensearchClientCache clientCache;
@Timed("redactmanager_search")
public SearchResult search(String queryString,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
int page,
int pageSize,
boolean returnSections) {
Query query = QueryStringConverter.convert(queryString);
Map<String, HighlightField> highlightFieldMap = new HashMap<>();
highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
highlightFieldMap.put("filename", new HighlightField.Builder().build());
highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());
SearchRequest request = new SearchRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.query(convertQuery(query,
dossierTemplateIds,
dossierIds,
fileId,
assignee,
includeDeletedDossiers,
includeArchivedDossiers,
workflowStatus,
fileAttributes,
returnSections))
.from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
.size(getPageSizeOrDefault(pageSize))
.source(s -> s.filter(f -> f.includes("dossierId",
"dossierTemplateId",
"dossierDeleted",
"dossierArchived",
"filename",
"fileId",
"assignee",
"dossierStatus",
"workflowStatus",
"fileAttributes")))
.highlight(h -> h.type(HighlighterType.of(b -> b.builtin(BuiltinHighlighterType.FastVector))).fields(highlightFieldMap))
.trackScores(true)
.build();
SearchResponse response = execute(request);
return convert(response, query);
}
protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {
try {
return clientCache.getClient().search(searchRequest, IndexDocument.class);
} catch (IOException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.searchFailed(e);
}
}
private org.opensearch.client.opensearch._types.query_dsl.Query convertQuery(Query query,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
boolean returnSections) {
var entireQuery = QueryBuilders.bool();
var sectionsQueries = QueryBuilders.bool();
for (String must : query.getMusts()) {
var textPhraseQuery = QueryBuilders.matchPhrase().field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must).build()._toQuery();
var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix().field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must).build()._toQuery();
var fileAttributesPhraseQuery = QueryBuilders.matchPhrase()
.field("fileAttributes.value")
.query(must.toLowerCase(Locale.ROOT))
.queryName("fileAttributes." + must)
.build()
._toQuery();
var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
entireQuery.must(filenameOrTextMustQuery);
sectionsQueries.should(textPhraseQuery);
}
for (String should : query.getShoulds()) {
var textTermQuery = QueryBuilders.matchPhrase().field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should).build()._toQuery();
var filenameTermQuery = QueryBuilders.matchPhrasePrefix().field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should).build()._toQuery();
var fileAttributesPhraseQuery = QueryBuilders.matchPhrase()
.field("fileAttributes.value")
.query(should.toLowerCase(Locale.ROOT))
.queryName("fileAttributes." + should)
.build()
._toQuery();
entireQuery.should(textTermQuery);
entireQuery.should(filenameTermQuery);
entireQuery.should(fileAttributesPhraseQuery);
sectionsQueries.should(textTermQuery);
}
if (returnSections) {
var nestedQuery = QueryBuilders.nested()
.scoreMode(ChildScoreMode.Avg)
.queryName("sections")
.query(sectionsQueries.build()._toQuery())
.path("sections")
.innerHits(i -> i.size(100))
.build()
._toQuery();
entireQuery.should(nestedQuery);
}
var filterQuery = QueryBuilders.bool();
if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {
var dossierTemplateIdQueryBuilder = QueryBuilders.bool();
for (var dossierTemplateId : dossierTemplateIds) {
if (StringUtils.isNotEmpty(dossierTemplateId)) {
dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match()
.field("dossierTemplateId")
.query(q -> q.stringValue(dossierTemplateId))
.build()
._toQuery());
}
}
filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
}
if (dossierIds != null && !dossierIds.isEmpty()) {
var dossierIdQueryBuilder = QueryBuilders.bool();
for (var dossierId : dossierIds) {
if (StringUtils.isNotEmpty(dossierId)) {
dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match().field("dossierId").query(q -> q.stringValue(dossierId)).build()._toQuery());
}
}
filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
}
if (StringUtils.isNotEmpty(fileId)) {
filterQuery.must(QueryBuilders.match().field("fileId").query(q -> q.stringValue(fileId)).build()._toQuery());
}
if (StringUtils.isNotEmpty(assignee)) {
filterQuery.must(QueryBuilders.match().field("assignee").query(q -> q.stringValue(assignee)).build()._toQuery());
}
if (includeArchivedDossiers) {
filterQuery.must(QueryBuilders.terms()
.field("dossierArchived")
.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))
.build()
._toQuery());
} else {
filterQuery.must(QueryBuilders.terms().field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build()))).build()._toQuery());
}
if (includeDeletedDossiers) {
filterQuery.must(QueryBuilders.terms()
.field("dossierDeleted")
.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))
.build()
._toQuery());
} else {
filterQuery.must(QueryBuilders.terms().field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build()))).build()._toQuery());
}
if (StringUtils.isNotEmpty(workflowStatus)) {
filterQuery.must(QueryBuilders.match().field("workflowStatus").query(q -> q.stringValue(workflowStatus)).build()._toQuery());
}
if (fileAttributes != null && !fileAttributes.isEmpty()) {
var fileAttributesQueryBuilder = QueryBuilders.bool();
for (var fileAttributeKey : fileAttributes.keySet()) {
if (StringUtils.isNotEmpty(fileAttributeKey)) {
fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
.must(QueryBuilders.match().field("fileAttributes.name").query(q -> q.stringValue(fileAttributeKey)).build()._toQuery())
.must(QueryBuilders.match().field("fileAttributes.value").query(q -> q.stringValue(fileAttributes.get(fileAttributeKey))).build()._toQuery())
.build()
._toQuery()));
}
}
filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
}
return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
}
private SearchResult convert(SearchResponse response, Query query) {
List<Hit> hits = response.hits().hits();
return SearchResult.builder()
.matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
.maxScore(response.maxScore() == null ? 0 : response.maxScore().floatValue())
.total(response.hits().total().value())
.build();
}
private MatchedDocument convertSearchHit(Hit hit, Query query) {
List<String> m = hit.matchedQueries();
Set<String> matchesTerms = m.stream()
.map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
.map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
.collect(Collectors.toSet());
Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());
IndexDocument indexDocument = (IndexDocument) hit.source();
MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
.score(hit.score().floatValue())
.dossierId(indexDocument.getDossierId())
.dossierTemplateId(indexDocument.getDossierTemplateId())
.fileId(indexDocument.getFileId())
.assignee(indexDocument.getAssignee())
.fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
.workflowStatus(indexDocument.getWorkflowStatus())
.fileName(indexDocument.getFilename())
.dossierDeleted(indexDocument.isDossierDeleted())
.dossierArchived(indexDocument.isDossierArchived())
.highlights(hit.highlight())
.matchedTerms(matchesTerms)
.unmatchedTerms(unmatchedTerms);
if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
.containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
}
return matchedDocumentBuilder.build();
}
private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {
Map<String, String> fileAttributes = new HashMap<>();
if (fileAttributesSourceMap != null) {
List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
}
return fileAttributes;
}
private MatchedSection convertInnerHit(Hit<JsonData> hit) {
JsonObject indexSection = hit.source().toJson().asJsonObject();
var jsonArray = indexSection.getJsonArray("pages");
var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());
return MatchedSection.builder()
.headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
.sectionNumber(indexSection.getString("sectionNumber"))
.pages(pages)
.matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
.build();
}
private int getPageSizeOrDefault(int pageSize) {
if (pageSize <= 0) {
return 10;
}
return pageSize;
}
private int getPageOrDefault(int page) {
if (page < 0) {
return 0;
}
return page;
}
}
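
When returnSections is true, the nested "sections" query contributes up to 100 inner hits per document, which convertSearchHit turns into MatchedSections. A usage sketch taken from the integration tests below:

SearchResult result = searchService.search("Cyberdyne", null, null, null, null, false, false, null, null, 0, 10, true);
// each MatchedDocument now carries matchedSections with headline, sectionNumber and page numbers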

View File

@ -1,5 +1,9 @@
package com.iqser.red.service.search.v1.server.settings;
import java.util.ArrayList;
import java.util.List;
import org.opensearch.action.support.WriteRequest;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
@ -12,11 +16,24 @@ import lombok.Data;
@ConfigurationProperties("elasticsearch")
public class ElasticsearchSettings {
private List<String> hosts = new ArrayList<>();
private int port = 9300;
private String scheme = "http";
private String apiKeyAuth;
private String username;
private String password;
private int numberOfShards = 5;
private int numberOfReplicas = 1;
private int numberOfNestedObjectLimit = 100000;
/**
* ES refresh policy for write requests to use. Used in tests to wait for completion of write requests.
*/
private String refreshPolicy = "true";
private WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.NONE;
}
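
In the removed OpenSearch implementation above, the policy is a plain String parsed per request; a sketch of that parsing, using the value the tests set via elasticsearch.refreshPolicy=wait_for:

Refresh refresh = Refresh._DESERIALIZER.parse("wait_for"); // also accepts "true" and "false"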

View File

@ -1,16 +0,0 @@
package com.iqser.red.service.search.v1.server.utils;
import lombok.experimental.UtilityClass;
@UtilityClass
public class IndexNameHelper {
private static final String SEARCH_INDEX = "%s_search";
public String getSearchIndex(String indexPrefix) {
return String.format(SEARCH_INDEX, indexPrefix);
}
}
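
The helper only applies the tenant's index prefix, e.g.:

IndexNameHelper.getSearchIndex("tenant1"); // -> "tenant1_search"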

View File

@ -0,0 +1,29 @@
package com.iqser.red.service.search.v1.server.utils;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;
import lombok.experimental.UtilityClass;
@UtilityClass
public class ResourceLoader {
public String load(String classpathPath) {
URL resource = ResourceLoader.class.getClassLoader().getResource(classpathPath);
if (resource == null) {
throw new IllegalArgumentException("could not load classpath resource: " + classpathPath);
}
try (InputStream is = resource.openStream(); InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8); BufferedReader br = new BufferedReader(isr)) {
return br.lines().collect(Collectors.joining("\n"));
} catch (IOException e) {
throw new IllegalArgumentException("could not load classpath resource: " + classpathPath, e);
}
}
}
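
A usage sketch, reading one of the bundled index resources as a single UTF-8 string:

String mapping = ResourceLoader.load("index/mapping.json"); // throws IllegalArgumentException if the resource is missing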

View File

@ -1,5 +0,0 @@
server:
port: 8099
persistence-service.url: "http://localhost:8085"
tenant-user-management-service.url: "http://localhost:8091/tenant-user-management/internal"

View File

@ -2,28 +2,11 @@ info:
description: Search Service Server V1
persistence-service.url: "http://persistence-service-v1:8080"
tenant-user-management-service.url: "http://tenant-user-management-service:8080/internal"
fforesight:
tenants.remote: true
tenant-exchange.name: 'tenants-exchange'
logging.pattern.level: "%5p [${spring.application.name},%X{traceId:-},%X{spanId:-}]"
logging.type: ${LOGGING_TYPE:CONSOLE}
kubernetes.namespace: ${NAMESPACE:default}
project.version: 1.0-SNAPSHOT
server:
port: 8080
lifecycle:
base-package: com.iqser.red.service.search
spring:
application:
name: search-service
main:
allow-circular-references: true # FIXME
profiles:
@ -50,11 +33,19 @@ management:
health.enabled: true
endpoints.web.exposure.include: prometheus, health
metrics.export.prometheus.enabled: ${monitoring.enabled:false}
tracing:
enabled: ${TRACING_ENABLED:false}
sampling:
probability: ${TRACING_PROBABILITY:1.0}
otlp:
tracing:
endpoint: ${OTLP_ENDPOINT:http://otel-collector-opentelemetry-collector.otel-collector:4318/v1/traces}
elasticsearch:
hosts:
- ${elasticsearch.cluster.hosts}
port: ${elasticsearch.cluster.port:9200}
scheme: ${elasticsearch.cluster.scheme:http}
username: ${elasticsearch.cluster.username}
password: ${elasticsearch.cluster.password}
apiKeyAuth: ${elasticsearch.cluster.apikey}
storage:
signer-type: 'AWSS3V4SignerType'
bucket-name: 'redaction'
region: 'us-east-1'
endpoint: 'https://s3.amazonaws.com'
backend: 's3'

View File

@ -1,17 +0,0 @@
<configuration>
<springProperty scope="configuration" name="logType" source="logging.type"/>
<springProperty scope="context" name="application.name" source="spring.application.name"/>
<springProperty scope="context" name="version" source="project.version"/>
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<include resource="org/springframework/boot/logging/logback/console-appender.xml"/>
<appender name="JSON" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="net.logstash.logback.encoder.LogstashEncoder"/>
</appender>
<root level="INFO">
<appender-ref ref="${logType}"/>
</root>
</configuration>

View File

@ -1,112 +1,81 @@
package com.iqser.red.service.search.v1.server.service;
import static org.mockito.Mockito.when;
import java.util.Set;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
import org.springframework.boot.test.autoconfigure.actuate.observability.AutoConfigureObservability;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.boot.test.util.TestPropertyValues;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.context.annotation.Primary;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.context.junit4.SpringRunner;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import org.testcontainers.utility.DockerImageName;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.server.Application;
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.storage.commons.StorageAutoConfiguration;
import com.iqser.red.storage.commons.service.StorageService;
import com.iqser.red.storage.commons.utils.FileSystemBackedStorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
@ComponentScan
@ExtendWith(SpringExtension.class)
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractElasticsearchIntegrationTest.WAIT_FOR_WRITE_REQUESTS, AbstractElasticsearchIntegrationTest.SEARCH_BACKEND})
@RunWith(SpringRunner.class)
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractElasticsearchIntegrationTest.WAIT_FOR_WRITE_REQUESTS})
@ContextConfiguration(initializers = {AbstractElasticsearchIntegrationTest.Initializer.class})
@EnableFeignClients(basePackageClasses = AbstractElasticsearchIntegrationTest.TestConfiguration.class)
@DirtiesContext
@AutoConfigureObservability
@SuppressWarnings("PMD")
public abstract class AbstractElasticsearchIntegrationTest {
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=wait_for";
public static final String SEARCH_BACKEND = "search.backend=elasticsearch";
@MockBean
private TenantsClient tenantsClient;
@MockBean
private RabbitAdmin rabbitAdmin;
@MockBean
private RabbitListenerEndpointRegistry rabbitListenerEndpointRegistry;
private static int port;
@BeforeEach
public void setupOptimize() {
TenantContext.setTenantId("redaction");
when(tenantsClient.getTenant("redaction")).thenReturn(TenantResponse.builder()
.searchConnection(SearchConnection.builder()
.hosts(Set.of("localhost"))
.port(port)
.scheme("http")
.numberOfShards("1")
.numberOfReplicas("5")
.indexPrefix("indexprefix")
.build())
.build());
}
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=IMMEDIATE";
static class Initializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {
public void initialize(ConfigurableApplicationContext configurableApplicationContext) {
var esContainer = new ElasticsearchContainer(DockerImageName.parse("elasticsearch:8.6.2").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch"));
esContainer.getEnvMap().put("xpack.security.enabled", "false");
var esContainer = new ElasticsearchContainer(DockerImageName.parse("elasticsearch:7.17.2").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch"));
esContainer.start();
var esHost = esContainer.getHttpHostAddress();
port = Integer.parseInt(esHost.substring(esHost.lastIndexOf(':') + 1));
String esHost = esContainer.getHttpHostAddress();
// String host = esHost.substring(0, esHost.indexOf(':'));
int port = Integer.parseInt(esHost.substring(esHost.indexOf(':') + 1));
TestPropertyValues.of(
// "elasticsearch.cluster.hosts[0]=" + host,
"elasticsearch.port=" + port).applyTo(configurableApplicationContext.getEnvironment());
}
}
@Autowired
protected IndexCreatorService indexCreationService;
@Autowired
protected StorageService storageService;
@Configuration
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
@ComponentScan(excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = StorageAutoConfiguration.class)})
@EnableAutoConfiguration(exclude = {StorageAutoConfiguration.class, RabbitAutoConfiguration.class})
public static class TestConfiguration {
@Bean
@Primary
public StorageService inmemoryStorage(ObjectMapper objectMapper) {
public IndexCreatorService indexCreationService(ElasticsearchClient elasticsearchClient, ElasticsearchSettings elasticsearchSettings) {
return new FileSystemBackedStorageService(objectMapper);
return new IndexCreatorService(elasticsearchClient, elasticsearchSettings);
}
@Bean
@Primary
public StorageService inmemoryStorage() {
return new FileSystemBackedStorageService();
}
}

View File

@ -1,104 +0,0 @@
package com.iqser.red.service.search.v1.server.service;
import static org.mockito.Mockito.when;
import java.util.Set;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.opensearch.testcontainers.OpensearchContainer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.ApplicationContextInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.context.annotation.Primary;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.testcontainers.utility.DockerImageName;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.server.Application;
import com.iqser.red.storage.commons.StorageAutoConfiguration;
import com.iqser.red.storage.commons.service.StorageService;
import com.iqser.red.storage.commons.utils.FileSystemBackedStorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
@ComponentScan
@ExtendWith(SpringExtension.class)
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractOpensearchIntegrationTest.WAIT_FOR_WRITE_REQUESTS, AbstractOpensearchIntegrationTest.SEARCH_BACKEND})
@ContextConfiguration(initializers = {AbstractOpensearchIntegrationTest.Initializer.class})
@EnableFeignClients(basePackageClasses = AbstractOpensearchIntegrationTest.TestConfiguration.class)
@DirtiesContext
@SuppressWarnings("PMD")
public abstract class AbstractOpensearchIntegrationTest {
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=wait_for";
public static final String SEARCH_BACKEND = "search.backend=opensearch";
@MockBean
private TenantsClient tenantsClient;
private static int port;
@BeforeEach
public void setupOptimize() {
TenantContext.setTenantId("redaction");
when(tenantsClient.getTenant("redaction")).thenReturn(TenantResponse.builder()
.searchConnection(SearchConnection.builder()
.hosts(Set.of("localhost"))
.port(port)
.scheme("http")
.numberOfShards("1")
.numberOfReplicas("5")
.indexPrefix("indexprefix")
.build())
.build());
}
static class Initializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {
public void initialize(ConfigurableApplicationContext configurableApplicationContext) {
var esContainer = new OpensearchContainer(DockerImageName.parse("opensearchproject/opensearch:2.6.0"));
esContainer.start();
String esHost = esContainer.getHttpHostAddress();
port = Integer.parseInt(esHost.substring(esHost.lastIndexOf(':') + 1));
}
}
@Autowired
protected StorageService storageService;
@Configuration
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
@ComponentScan(excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = StorageAutoConfiguration.class)})
public static class TestConfiguration {
@Bean
@Primary
public StorageService inmemoryStorage(ObjectMapper objectMapper) {
return new FileSystemBackedStorageService(objectMapper);
}
}
}

View File

@ -0,0 +1,127 @@
package com.iqser.red.service.search.v1.server.service;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.springframework.core.io.InputStreamResource;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.commons.jackson.ObjectMapperFactory;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
import com.iqser.red.storage.commons.service.StorageService;
import lombok.SneakyThrows;
public class FileSystemBackedStorageService implements StorageService {
private final Map<String, File> dataMap = new HashMap<>();
public FileSystemBackedStorageService() {
}
@SneakyThrows
@Override
public InputStreamResource getObject(String objectId) {
var res = dataMap.get(objectId);
if (res == null) {
throw new StorageObjectDoesNotExist(new RuntimeException());
}
return new InputStreamResource(new FileInputStream(res));
}
@Override
public void deleteObject(String objectId) {
dataMap.remove(objectId);
}
@Override
public boolean objectExists(String objectId) {
return dataMap.containsKey(objectId);
}
@Override
public void init() {
}
@Override
@SneakyThrows
public <T> void storeJSONObject(String objectId, T any) {
File tempFile = File.createTempFile("test", ".tmp");
getMapper().writeValue(new FileOutputStream(tempFile), any);
dataMap.put(objectId, tempFile);
}
private ObjectMapper getMapper() {
return ObjectMapperFactory.create();
}
@Override
@SneakyThrows
public <T> T readJSONObject(String objectId, Class<T> clazz) {
if (dataMap.get(objectId) == null || !dataMap.get(objectId).exists()) {
throw new StorageObjectDoesNotExist("Stored object not found");
}
return getMapper().readValue(new FileInputStream(dataMap.get(objectId)), clazz);
}
public List<String> listPaths() {
return new ArrayList<>(dataMap.keySet());
}
public List<String> listFilePaths() {
return dataMap.values().stream().map(File::getAbsolutePath).collect(Collectors.toList());
}
@Override
@SneakyThrows
public void storeObject(String objectId, InputStream stream) {
File tempFile = File.createTempFile("test", ".tmp");
try (var fileOutputStream = new FileOutputStream(tempFile)) {
IOUtils.copy(stream, fileOutputStream);
}
dataMap.put(objectId, tempFile);
}
public void clearStorage() {
this.dataMap.forEach((k, v) -> {
v.delete();
});
this.dataMap.clear();
}
}
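
A minimal round-trip sketch for this test double; objects are spilled to temp files and tracked in the in-memory map:

var storage = new FileSystemBackedStorageService();
storage.storeJSONObject("dossier/1/text", Map.of("k", "v")); // serialized via ObjectMapperFactory
var read = storage.readJSONObject("dossier/1/text", Map.class);
storage.clearStorage(); // deletes the backing temp files and clears the map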

View File

@ -2,12 +2,12 @@ package com.iqser.red.service.search.v1.server.service;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.Assert;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import com.iqser.red.service.persistence.service.v1.api.shared.model.index.IndexInformation;
import com.iqser.red.service.persistence.service.v1.api.model.index.IndexInformation;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
@ -37,7 +37,7 @@ public class IndexTest extends AbstractElasticsearchIntegrationTest {
// Assert
System.out.println(hash);
Assertions.assertNotNull(hash);
Assert.assertNotNull(hash);
}
@ -50,7 +50,7 @@ public class IndexTest extends AbstractElasticsearchIntegrationTest {
when(indexInformationClient.getIndexInformation()).thenReturn(indexInformation);
// Act and Assert
Assertions.assertTrue(indexInformationService.hasIndexChanged());
Assert.assertTrue(indexInformationService.hasIndexChanged());
}
@ -63,7 +63,7 @@ public class IndexTest extends AbstractElasticsearchIntegrationTest {
when(indexInformationClient.getIndexInformation()).thenReturn(indexInformation);
// Act and Assert
Assertions.assertFalse(indexInformationService.hasIndexChanged());
Assert.assertFalse(indexInformationService.hasIndexChanged());
}
@ -74,7 +74,7 @@ public class IndexTest extends AbstractElasticsearchIntegrationTest {
when(indexInformationClient.getIndexInformation()).thenReturn(null);
// Act and Assert
Assertions.assertTrue(indexInformationService.hasIndexChanged());
Assert.assertTrue(indexInformationService.hasIndexChanged());
}
}

View File

@ -2,19 +2,17 @@ package com.iqser.red.service.search.v1.server.service;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.Test;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.junit.Test;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.core.io.ClassPathResource;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.Dossier;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.model.SearchRequest;
@ -26,7 +24,6 @@ import com.iqser.red.service.search.v1.server.controller.SearchController;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
import com.iqser.red.service.search.v1.server.service.utils.MetricValidationUtils;
import com.knecon.fforesight.tenantcommons.TenantContext;
import io.micrometer.prometheus.PrometheusMeterRegistry;
import lombok.SneakyThrows;
@ -69,7 +66,7 @@ public class MetricsIntegrationTest extends AbstractElasticsearchIntegrationTest
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
storageService.storeJSONObject(TenantContext.getTenantId(), TextStorageService.StorageIdUtils.getStorageId("1", "1", FileType.SIMPLIFIED_TEXT), text);
storageService.storeJSONObject(TextStorageService.StorageIdUtils.getStorageId("1", "1", FileType.SIMPLIFIED_TEXT), text);
IndexMessage indexRequest = new IndexMessage();
indexRequest.setDossierId("1");

View File

@ -6,17 +6,15 @@ import java.io.IOException;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Test;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.junit.Test;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.core.io.ClassPathResource;
import org.testcontainers.shaded.org.apache.commons.lang3.StringUtils;
import org.testcontainers.shaded.org.apache.commons.lang.StringUtils;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.persistence.service.v1.api.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.search.v1.model.MatchedDocument;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.client.DossierClient;
@ -27,7 +25,7 @@ import com.iqser.red.service.search.v1.server.model.Text;
import lombok.SneakyThrows;
public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
public class SearchTest extends AbstractElasticsearchIntegrationTest {
@Autowired
private ObjectMapper objectMapper;
@ -65,37 +63,9 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
@MockBean
private IndexInformationService indexInformationService;
@Autowired
private IndexDocumentConverterService indexDocumentConverterService;
private final long UPDATE_TIMER = 1500;
@Test
@SneakyThrows
public void testSimpleSectionsQuery() {
ClassPathResource textResource = new ClassPathResource("files/crafted document.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
indexDocument("template1",
"dossierId1",
"fileId1",
"crafted document.pdf",
text,
"UserId",
false,
false,
WorkflowStatus.NEW,
null);
SearchResult result = searchService.search("Cyberdyne", null, null, null, null, false, false, null, null, 0, 10, true);
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
}
@Test
@SneakyThrows
public void testSearchFileAttribute() {
@ -103,7 +73,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
indexDocument("template1",
documentIndexService.indexDocument("template1",
"dossierId1",
"fileId1",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
@ -113,7 +83,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
false,
WorkflowStatus.NEW,
Map.of("F1Key", "F1Value"));
indexDocument("template2",
documentIndexService.indexDocument("template2",
"dossierId2",
"fileId2",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
@ -138,7 +108,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
indexDocument("template1",
documentIndexService.indexDocument("template1",
"dossierId1",
"fileId1",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
@ -178,7 +148,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
indexDocument("template1",
documentIndexService.indexDocument("template1",
"dossierId1",
"fileId1",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
@ -188,7 +158,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
false,
WorkflowStatus.NEW,
Map.of("F1Key", "F1Value"));
indexDocument("template2",
documentIndexService.indexDocument("template2",
"dossierId2",
"fileId2",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
@ -210,14 +180,14 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
// Act & Assert 3
updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"));
documentUpdateService.updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, "UserId2", false, false, WorkflowStatus.NEW.name(), Map.of("F2Key", "F2Value"), 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
// Act & Assert 4
updateDocument("fileId2", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"));
documentUpdateService.updateDocument("fileId2", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("F2Key", "F2Value"), 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(2);
@ -236,7 +206,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
indexDocument("template1",
documentIndexService.indexDocument("template1",
"dossierId1",
"fileId1",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
@ -246,7 +216,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
false,
WorkflowStatus.NEW,
Map.of("F1Key", "F1Value"));
indexDocument("template2",
documentIndexService.indexDocument("template2",
"dossierId2",
"fileId2",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
@ -263,7 +233,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
// Act & Assert 2
updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
documentUpdateService.updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, "UserId2", false, false, null, null, 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(2);
@ -282,7 +252,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
indexDocument("template1",
documentIndexService.indexDocument("template1",
"dossierId1",
"fileId1",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
@ -292,7 +262,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
false,
WorkflowStatus.NEW,
Map.of("F1Key", "F1Value"));
indexDocument("template2",
documentIndexService.indexDocument("template2",
"dossierId2",
"fileId2",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
@ -314,7 +284,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
// Act & Assert 3
updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
documentUpdateService.updateDocument("fileId1", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, WorkflowStatus.APPROVED.name(), null, 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
@ -326,7 +296,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
// Act & Assert 5
updateDocument("fileId2", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
documentUpdateService.updateDocument("fileId2", "UserId2", false, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, WorkflowStatus.APPROVED.name(), null, 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(2);
@ -345,7 +315,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
indexDocument("template1",
documentIndexService.indexDocument("template1",
"dossierId1",
"fileId1",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
@ -355,7 +325,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
false,
WorkflowStatus.NEW,
Map.of("F1Key", "F1Value"));
indexDocument("template2",
documentIndexService.indexDocument("template2",
"dossierId2",
"fileId2",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
@ -385,7 +355,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
// Act & Assert 3
updateDocument("fileId1", "UserId2", false, true, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
documentUpdateService.updateDocument("fileId1", "UserId2", false, true, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, null, 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
@ -398,7 +368,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId2");
// Act & Assert 5
updateDocument("fileId2", "UserId2", true, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
documentUpdateService.updateDocument("fileId2", "UserId2", true, false, WorkflowStatus.APPROVED.name(), Map.of("U2Key", "U2Value"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, null, 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
@ -419,7 +389,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
indexDocument("template1",
documentIndexService.indexDocument("template1",
"dossierId1",
"fileId1",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-1.pdf",
@ -429,7 +399,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
false,
WorkflowStatus.NEW,
Map.of("F1Key", "F1Value"));
indexDocument("template2",
documentIndexService.indexDocument("template2",
"dossierId2",
"fileId2",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06-2.pdf",
@ -463,7 +433,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
// Act & Assert 6
updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), Map.of("F1Key", "F1ValueNEW"));
documentUpdateService.updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), Map.of("F1Key", "F1ValueNEW"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, Map.of("F1Key", "F1Value"), 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
@ -474,14 +444,14 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId1");
// Act & Assert 8
updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), Map.of("F1Key", "F1Value", "F2Key", "F2Value"));
documentUpdateService.updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), Map.of("F1Key", "F1Value", "F2Key", "F2Value"));
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, Map.of("F1Key", "F1Value", "F2Key", "F2Value"), 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
assertThat(result.getMatchedDocuments().stream().map(MatchedDocument::getFileId)).contains("fileId1");
// Act & Assert 9
updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), null);
documentUpdateService.updateDocument("fileId1", "UserId", false, false, WorkflowStatus.NEW.name(), null);
Thread.sleep(UPDATE_TIMER);
result = searchService.search("S-Metolachlor", null, null, null, null, false, false, null, Map.of("F1Key", "F1Value"), 0, 10, false);
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
@ -497,8 +467,17 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
ClassPathResource textResource2 = new ClassPathResource("files/Text2.json");
Text text2 = objectMapper.readValue(textResource2.getInputStream(), Text.class);
indexDocument("template1", "dossierId1", "fileId1", "Single Study - Oral (Gavage) Mouse.pdf", text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
indexDocument("template2",
documentIndexService.indexDocument("template1",
"dossierId1",
"fileId1",
"Single Study - Oral (Gavage) Mouse.pdf",
text,
"UserId",
false,
false,
WorkflowStatus.NEW,
Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template2",
"dossierId2",
"fileId2",
"S-Metolachlor_RAR_01_Volume_1_2018-09-06.pdf",
@ -545,7 +524,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "luke-skywalker-42.pdf";
String searchString = "szedhsegkekhglghserlkghrsdvkerxyfdbvkrdjgh";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 1, 10, true);
@ -565,7 +544,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "luke-skywalker-42.pdf";
String searchString = fileName;
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -587,7 +566,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "luke-blankwalker 42.pdf";
String searchString = fileName;
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -610,7 +589,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "luke.pdf";
String searchString = "luke";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -632,7 +611,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "1 Abamectin_prr.pdf";
String searchString = "Abamectin";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -654,7 +633,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "1 Abamectin_prr.pdf";
String searchString = "prr";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -676,7 +655,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "1 Abamectin_prr.pdf";
String searchString = "1";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -698,7 +677,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "1 Abamectin_prr.pdf";
String searchString = "1 Abamectin_prr.pdf";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -719,7 +698,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "1 Abamectin_prr.pdf";
String searchString = "1 Abamectin";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -740,7 +719,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "1 Abamectin_prr.pdf";
String searchString = "_prr.pdf";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -759,8 +738,8 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
String fileName = "116 IDD0000261308.pdf";
String filename2 = "115 IDD0000261308.pdf";
String searchString = "\"116 IDD\"";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
indexDocument("template1", "dossierId1", "fileId2", filename2, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId2", filename2, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -768,7 +747,6 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(result.getMatchedDocuments().get(0).getMatchedTerms().size()).isGreaterThan(0);
}
@Test
public void testFilenameWithNumbersMatch() throws IOException {
@ -776,7 +754,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "VV-733382.pdf";
String searchString = "733382";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -795,7 +773,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "LUKE.pdf";
String searchString = "luke";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -817,7 +795,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "luke.pdf";
String searchString = "luke.pdf";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -839,7 +817,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "Äpfel.pdf";
String searchString = "äpfel";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -861,7 +839,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "Äpfel.pdf";
String searchString = "äpfel.pdf";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -883,7 +861,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "luke-skywalker-42.pdf";
String searchString = "luke";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -905,7 +883,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "luke-skywalker-42.pdf";
String searchString = "luke-skywalker-42";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -928,7 +906,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
String fileName = "luke-skywalker-42.pdf";
String matchedString = fileName;
String searchString = "\"" + matchedString + "\"";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -951,7 +929,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
String fileName = "luke-skywalker-42.pdf";
String matchedString = "luke";
String searchString = "\"" + matchedString + "\"";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -974,7 +952,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
String fileName = "luke-skywalker-42.pdf";
String matchedString = "luke-skywalker-42";
String searchString = "\"luke-skywalker-42\"";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -997,7 +975,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
String fileName = "luke-blankwalker 42.pdf";
String matchedString = fileName;
String searchString = "\"" + matchedString + "\"";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -1019,7 +997,7 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
String fileName = "luke-skywalker-42.pdf";
String searchString = "14C]-SDS-46851";
indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
documentIndexService.indexDocument("template1", "dossierId1", "fileId1", fileName, text, "UserId", false, false, WorkflowStatus.NEW, Map.of("F1Key", "F1Value"));
// Act
SearchResult result = searchService.search(searchString, null, List.of("dossierId1"), null, null, false, false, null, null, 0, 10, true);
@ -1032,37 +1010,4 @@ public class ElasticsearchTest extends AbstractElasticsearchIntegrationTest {
assertThat(StringUtils.contains(result.getMatchedDocuments().get(0).getHighlights().get("sections.text").toArray()[0].toString(), searchString)).isTrue();
}
private void indexDocument(String dossierTemplateId,
String dossierId,
String fileId,
String filename,
Text text,
String assignee,
boolean deleted,
boolean archived,
WorkflowStatus workflowStatus,
Map<String, String> fileAttributes) {
var indexDocument = indexDocumentConverterService.convert(dossierTemplateId,
dossierId,
fileId,
filename,
text,
assignee,
deleted,
archived,
workflowStatus,
fileAttributes);
documentIndexService.indexDocument(indexDocument);
}
public void updateDocument(String fileId, String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {
var updateDocument = indexDocumentConverterService.convertUpdateDocument(assignee, deleted, archived, workflowStatus, fileAttributes);
documentUpdateService.updateDocument(fileId, updateDocument);
}
}
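(Editorial sketch, not part of the diff.) The tests above synchronize on a fixed Thread.sleep(UPDATE_TIMER) between updateDocument and the follow-up search. A polling alternative is sketched below, assuming the Awaitility test library were on the classpath; it is not referenced anywhere in this diff.

import static org.assertj.core.api.Assertions.assertThat;

import java.time.Duration;

import org.awaitility.Awaitility;

// Polls the search index until the assertion passes instead of sleeping a
// fixed 1500 ms; fails only once the 5 s budget is exhausted.
Awaitility.await()
        .atMost(Duration.ofSeconds(5))
        .untilAsserted(() -> {
            SearchResult result = searchService.search("S-Metolachlor", null, null, null, null,
                    false, false, WorkflowStatus.APPROVED.name(), null, 0, 10, false);
            assertThat(result.getMatchedDocuments()).hasSize(2);
        });

This keeps the tests deterministic under a slow index refresh without inflating the happy-path runtime.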


@ -1,24 +1,23 @@
ribbon:
ConnectTimeout: 600000
ReadTimeout: 600000
logging.type: ${LOGGING_TYPE:CONSOLE}
logging.level.root: INFO
fforesight:
tenant-exchange:
name: 'tenants-exchange'
spring:
main:
allow-bean-definition-overriding: true
allow-circular-references: true # FIXME
storage:
signer-type: 'AWSS3V4SignerType'
bucket-name: 'redaction'
region: 'us-east-1'
endpoint: 'https://s3.amazonaws.com'
backend: 's3'
elasticsearch:
hosts:
- 'localhost'
management:
endpoint:
@ -29,8 +28,3 @@ management:
metrics.export.prometheus.enabled: true
persistence-service.url: 'http://mock.url'
server:
port: 19547
POD_NAME: search-service
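(Editorial sketch, not part of the diff.) The application config above enables allow-circular-references and marks it FIXME, which suggests a constructor cycle between two beans. A common way to retire that flag is to inject one side of the cycle lazily; the bean types below are placeholders, not the service's actual beans.

import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;

@Service
class IndexingService {

    private final SearchService searchService; // placeholder collaborator

    // @Lazy injects a proxy, so SearchService need not be fully constructed
    // while IndexingService is being created, breaking the A -> B -> A cycle.
    IndexingService(@Lazy SearchService searchService) {
        this.searchService = searchService;
    }
}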


@ -1,205 +0,0 @@
{
"numberOfPages": 9,
"sectionTexts": [
{
"sectionNumber": "1",
"text": "Rule 0: Expand CBI Authors with firstname initials F. Lastname, J. Doe, M. Mustermann Lastname M., Doe J. Mustermann M."
},
{
"sectionNumber": "2",
"text": "Rule 1/2: Redact CBI Authors based on Dict Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No David Ksenia Max Mustermann Ranya Eikenboom Charalampos Schenk Tanja Schmitt ← should not be annotated, not in Dictionary"
},
{
"sectionNumber": "3",
"text": "Rule 3/4: Redact (not) CBI Add/ress based on Dict Dont Redact (mark as skipped) when Vertebrate Study is No Redact when Vertebrate Study is Yes Warnsveld, 7232 CX Warnsveld, Netherlands, NL Institut Industries, 33 Rue Jean Baffier, 18000 Bourges, France, FR 4-6 Chem. des Varennes, 18300 Saint-Satur, France, FR Lesdo Industries, Chäppelisträssli, 6078 Lungern, Switzerland Shlissel'burgskaya Ulitsa, Nizhny Novgorod Oblast, Russia, 603034, RU Karl Johans Gate 11, 0154 Oslo, Norway, NOR ← should not be annotated, not in Dictionary"
},
{
"sectionNumber": "4",
"text": "Rule 5: Do not redact genitive CBI_authors (Entries based on Dict) Expand to Hint Clarissas Donut ← not added to Dict, should be not annotated Simpson's Tower ← added to Authors-Dict, should be annotated"
},
{
"sectionNumber": "5",
"text": "Reference No Author(s) Year Title Laboratory"
},
{
"sectionNumber": "6",
"text": "BR2 /2 Michael N. 1998 The role of physical education in the school system. Weyland Industries"
},
{
"sectionNumber": "7",
"text": "BR3 /5 Funnarie B. 2001 It should be illegal to produce and sell tobacco Authentic Diagnostics"
},
{
"sectionNumber": "8",
"text": "ZZ/12 Feuer A. 1989 Social media is the real cause of teenage depression. Tyrell Corporation"
},
{
"sectionNumber": "10",
"text": "Rule 6-11 (Authors Table) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No"
},
{
"sectionNumber": "11",
"text": "Rule 12/13: Redact/Hint if CTL or BL was found Redact when Vertebrate Study is Yes Hint when Vertebrate Study is No CTL/without dictionary entry CTL without Slash BL/without dictionary entry BL without Slash CTL/with dictionary entry 1234 with Slash CTL with dictionary entry 5678 without Slash BL/with dictionary entry 1234 with Slash BL with dictionary entry 5678 without Slash"
},
{
"sectionNumber": "12",
"text": "Rule 14/15: Redact and add recommendation for et al. Redact Term “Desiree”, “Melanie” and add to Recommendation CBI Authors if Vertebrate Study is Yes & No Lorem ipsum dolor sit amet, consectetur adipiscing elit Desiree et al sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Melanie et al. Reference No 12345 Lorem ipsum."
},
{
"sectionNumber": "13",
"text": "Rule 16/17: Add recommendation for Addresses in Test Organism/Animals sections Recommend only if Vertebrate Study is Yes, else do nothing Lorem ipsum dolor sit Species: Mouse; Source: Stark Industries"
},
{
"sectionNumber": "14",
"text": "Rule 16/17 (additional) negative Test; missing first Key Nothing should happen because of missing first/second keyword according to the rules Dont redact here because of missing first key; Source: Omni Consumer Products Dont redact here because missing first keyword; Source Resources Development Administration"
},
{
"sectionNumber": "15",
"text": "Rule 16/17 (additional) negative Test; missing second Key Dont redact here because of missing second key; Species: Mouse; Omni Consumer Products Dont redact here because missing second keyword; Species: Mouse, Resources Development Administration"
},
{
"sectionNumber": "16",
"text": "Rule 18: Do not redact Names and Addresses if Published Information found Do not redact Names and Addresses if Published Information found Lorem ipsum dolor sit amet Oxford University Press in voluptate velit esse cillum. Iakovos Geiger, Julian Ritter, Asya Lyon, Carina Madsen, Alexandra Häusler, Hanke Mendel, Ranya Eikenboom. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Min Kwok, Jun K., Tu Wong, Qiang Suen, Zhou Mah, Ning Liu, Lei W. Huang, Ru X. Wu"
},
{
"sectionNumber": "17",
"text": "Rule 19/20: Redacted PII Personal Identification Information based on Dict Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Naka-27 Aomachi, Nomi, Ishikawa 923-1101, Japan, JP Sude Halide Nurullah Özgür U. Reyhan B. Rahim C. J. Alfred Xinyi Y. Tao Clara Siegfried ← not added to Dict, should be not annotated"
},
{
"sectionNumber": "18",
"text": "Rule 21/22: Redact Emails by RegEx Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Duis aute irure dolor in library@outlook.com reprehenderit in voluptate gordonjcp@msn.com velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint dinther@comcast.net occaecat cupidatat non proident, sunt in kawasaki@me.com culpa qui officia deserunt mollit anim id est laborum."
},
{
"sectionNumber": "19",
"text": "Description Text Contact Point"
},
{
"sectionNumber": "20",
"text": "Duis aute irure dolor in reprehenderit in voluptate velit esse cillum Contact Point dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Contact point: Central Research Industry Phone: +49 2113 2311 563 Fax: +49 2113 2311 560 Tel.: +81 764770164 Tel: +81 6653 44563 E-mail: Seriknowmobil@co.uk Email: maximiliamschmitt@arcor.de e-mail: maximiliamschmitt@t-online.de E-mail address: example@mail.com Contact: Maximiliam Schmitt Telephone number: +27414328992 Telephone No: +274 1432 8991 Fax number: +274 1432 8990 Telephone: +274 34223331 Phone No. +274 1432 8933 Contact: 493 1223 4592 European contact: European Central Institute Alternative contact: Emilia Lockhart Alternative contact: Cyberdyne Systems Tower Defense 121a Hong Kong, BT District"
},
{
"sectionNumber": "22",
"text": "Rule 23/24: Redact contact information (contains \"Contact point:\") Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Contact-Information was found should be appears”"
},
{
"sectionNumber": "23",
"text": "Description Text Applicant"
},
{
"sectionNumber": "24",
"text": "Duis aute irure dolor in reprehenderit in voluptate velit esse cillum Contact Point dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Contact point: Central Research Industry Phone: +49 2113 2311 563 Fax: +49 2113 2311 560 Tel.: +81 764770164 Tel: +81 6653 44563 E-mail: Seriknowmobil@co.uk Email: maximiliamschmitt@arcor.de e-mail: maximiliamschmitt@t-online.de E-mail address: example@mail.com Contact: Maximiliam Schmitt Telephone number: +27414328992 Telephone No: +274 1432 8991 Fax number: +274 1432 8990 Telephone: +274 34223331 Phone No. +274 1432 8933 Contact: 493 1223 4592 European contact: European Central Institute Alternative contact: Emilia Lockhart Alternative contact: Cyberdyne Systems Tower Defense 121a Hong Kong, BT District"
},
{
"sectionNumber": "26",
"text": "Rule 25/26: Redact contact information (contains \"Applicant\" as Headline or Text) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Applicant Information was found should be appears” Applicant Name: Soylent Corporation Contact point: Riddley Scott Address: 359-21 Huam-dong Yongsan-gu Seoul, South Korea Phone: +82 122 34188 Fax: +82 122 34180 E-mail: food-industry@korea.com Contact: This is a special case, everything between this and the next keyword should be redacted Tel.: +275 5678 1234 132 fsdfdfre frefref"
},
{
"sectionNumber": "27",
"text": "Rule 27/28: Redact contact Information (contains Producer) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Producer was found” should be appears Producer of the plant production Name: Umbrella Corporation Contact: Jill Valentine Address: 359-21 Huam-dong Yongsan-gu Seoul, South Korea Phone: +82 122 34188 Fax: +82 122 34180 E-mail: pharma-industry@korea.com"
},
{
"sectionNumber": "28",
"text": "Rule 29/30/31/32: If Text contains \"AUTHORS:\" and \"COMPLETION DATES\" but not \"STUDY COMPLETION DATES\", then Redact between both Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Study Report___ AUTHOR(S): Dr. Alan Grant COMPLETION DATE: 02 December 1997"
},
{
"sectionNumber": "29",
"text": "Rule 29/30/31/32: (additional) negative Test for Study completion dates No Redaction should be appears here Study Report___ AUTHOR(S): Dr. Alan Grant STUDY COMPLETION DATE: 02 December 1997"
},
{
"sectionNumber": "30",
"text": "Rule 33/34: If Text contains \"Performing Lab\" and \"Lab Project ID\", then Redact everything between Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Study Report___ PERFORMING LABORATORY: Umbrella Corporation LABORATORY PROJECT ID: Number 20201/33991/ERZAT/21"
},
{
"sectionNumber": "31",
"text": "Rule 35/36/37/38: ?? Tba"
},
{
"sectionNumber": "32",
"text": "Rule 39: Purity Hint Add Purity as Hint when Percent-Numbers is there Test Item: Soda Purity: 45% ← should be Hint Purity: <45% ← should be Hint Purity: >45% ← should be Hint Purity: 101% ← should ne be Hint because >100 % is not possible Purity: =>45% ← should be not Hint because additional symbols Purity: =<45% ← should be not Hint because additional symbols Purity: aa 45% ← should be not Hint because additional symbols Purity: 45% aa ← should be not Hint because additional symbols Purity: aa45% ← should be not Hint because additional symbols Purity: 45%aa ← should be not Hint because additional symbols Product-Code: EAK-L443 purity: 99% ← not Hint because case sensitive purity: >99% ← not Hint because case sensitive purity: <99% ← not Hint because case sensitive Supplier: GreenForce"
},
{
"sectionNumber": "33",
"text": "Rule 40: Ignore Dossier-Redaction if Confidentiality is not set Dont redact Dossier-Redaction if Confidentiality is not set in file attributes Excepteur sint occaecat cupidatat non proident, myDossierRedaction sunt in culpa qui officia deserunt mollit anim id est laborum."
},
{
"sectionNumber": "34",
"text": "Rule 41/42: Redact Signatures Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No __________________________ __________________________ Signed by: Dilara Sonnenschein Signed by: Tobias Müller"
},
{
"sectionNumber": "35.1.1.3",
"text": "Rule 43: Redact Logo Redact Logo only if Vertebrate Study is Yes, else do nothing (skipped)"
},
{
"sectionNumber": "36",
"text": "This is a Page-Header"
},
{
"sectionNumber": "37",
"text": "This is a Page-Header"
},
{
"sectionNumber": "38",
"text": "This is a Page-Header"
},
{
"sectionNumber": "39",
"text": "This is a Page-Header"
},
{
"sectionNumber": "40",
"text": "This is a Page-Header"
},
{
"sectionNumber": "41",
"text": "This is a Page-Header"
},
{
"sectionNumber": "42",
"text": "This is a Page-Header"
},
{
"sectionNumber": "43",
"text": "This is a Page-Header"
},
{
"sectionNumber": "44",
"text": "This is a Page-Header"
},
{
"sectionNumber": "45",
"text": "This is a Page-Footer"
},
{
"sectionNumber": "46",
"text": "This is a Page-Footer"
},
{
"sectionNumber": "47",
"text": "This is a Page-Footer"
},
{
"sectionNumber": "48",
"text": "This is a Page-Footer"
},
{
"sectionNumber": "49",
"text": "This is a Page-Footer"
},
{
"sectionNumber": "50",
"text": "This is a Page-Footer"
},
{
"sectionNumber": "51",
"text": "This is a Page-Footer"
},
{
"sectionNumber": "52",
"text": "This is a Page-Footer"
},
{
"sectionNumber": "53",
"text": "This is a Page-Footer"
}
]
}


@ -1,5 +0,0 @@
rootProject.name = "search-service"
include(":search-service-api-v1")
include(":search-service-server-v1")
project(":search-service-api-v1").projectDir = file("search-service-v1/search-service-api-v1")
project(":search-service-server-v1").projectDir = file("search-service-v1/search-service-server-v1")