Compare commits

1 Commit

Author       SHA1        Message                                  Date
deiflaender  c8f1f1255e  hotfix: Tested dense vector similarity   2023-04-06 15:47:30 +02:00
99 changed files with 639380 additions and 4914 deletions

.gitignore vendored (9 changes)

@@ -26,12 +26,3 @@
**/.DS_Store
**/classpath-data.json
**/dependencies-and-licenses-overview.txt
gradle.properties
gradlew
gradlew.bat
gradle/
**/.gradle
**/build

@@ -1,23 +0,0 @@
variables:
SONAR_PROJECT_KEY: 'RED_search-service'
include:
- project: 'gitlab/gitlab'
ref: 'main'
file: 'ci-templates/gradle_java.yml'
deploy:
stage: deploy
tags:
- dind
script:
- echo "Building with gradle version ${BUILDVERSION}"
- gradle -Pversion=${BUILDVERSION} publish
- gradle bootBuildImage --cleanCache --publishImage -PbuildbootDockerHostNetwork=true -Pversion=${BUILDVERSION}
- echo "BUILDVERSION=$BUILDVERSION" >> version.env
artifacts:
reports:
dotenv: version.env
rules:
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
- if: $CI_COMMIT_BRANCH =~ /^release/
- if: $CI_COMMIT_TAG

bamboo-specs/pom.xml Normal file (37 changes)

@@ -0,0 +1,37 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>com.atlassian.bamboo</groupId>
<artifactId>bamboo-specs-parent</artifactId>
<version>8.1.3</version>
<relativePath/>
</parent>
<artifactId>bamboo-specs</artifactId>
<version>1.0.0-SNAPSHOT</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>com.atlassian.bamboo</groupId>
<artifactId>bamboo-specs-api</artifactId>
</dependency>
<dependency>
<groupId>com.atlassian.bamboo</groupId>
<artifactId>bamboo-specs</artifactId>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<!-- run 'mvn test' to perform offline validation of the plan -->
<!-- run 'mvn -Ppublish-specs' to upload the plan to your Bamboo server -->
</project>

@@ -0,0 +1,129 @@
package buildjob;
import static com.atlassian.bamboo.specs.builders.task.TestParserTask.createJUnitParserTask;
import java.time.LocalTime;
import com.atlassian.bamboo.specs.api.BambooSpec;
import com.atlassian.bamboo.specs.api.builders.BambooKey;
import com.atlassian.bamboo.specs.api.builders.Variable;
import com.atlassian.bamboo.specs.api.builders.docker.DockerConfiguration;
import com.atlassian.bamboo.specs.api.builders.permission.PermissionType;
import com.atlassian.bamboo.specs.api.builders.permission.Permissions;
import com.atlassian.bamboo.specs.api.builders.permission.PlanPermissions;
import com.atlassian.bamboo.specs.api.builders.plan.Job;
import com.atlassian.bamboo.specs.api.builders.plan.Plan;
import com.atlassian.bamboo.specs.api.builders.plan.PlanIdentifier;
import com.atlassian.bamboo.specs.api.builders.plan.Stage;
import com.atlassian.bamboo.specs.api.builders.plan.branches.BranchCleanup;
import com.atlassian.bamboo.specs.api.builders.plan.branches.PlanBranchManagement;
import com.atlassian.bamboo.specs.api.builders.project.Project;
import com.atlassian.bamboo.specs.builders.task.CheckoutItem;
import com.atlassian.bamboo.specs.builders.task.InjectVariablesTask;
import com.atlassian.bamboo.specs.builders.task.ScriptTask;
import com.atlassian.bamboo.specs.builders.task.VcsCheckoutTask;
import com.atlassian.bamboo.specs.builders.task.VcsTagTask;
import com.atlassian.bamboo.specs.builders.trigger.BitbucketServerTrigger;
import com.atlassian.bamboo.specs.builders.trigger.ScheduledTrigger;
import com.atlassian.bamboo.specs.model.task.InjectVariablesScope;
import com.atlassian.bamboo.specs.model.task.ScriptTaskProperties.Location;
import com.atlassian.bamboo.specs.util.BambooServer;
/**
* Plan configuration for Bamboo.
* Learn more on: <a href="https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs">https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs</a>
*/
@BambooSpec
public class PlanSpec {
private static final String SERVICE_NAME = "search-service";
private static final String JVM_ARGS = " -Xmx4g -XX:+ExitOnOutOfMemoryError -XX:SurvivorRatio=2 -XX:NewRatio=1 -XX:InitialTenuringThreshold=16 -XX:MaxTenuringThreshold=16 -XX:InitiatingHeapOccupancyPercent=35 ";
private static final String SERVICE_KEY = SERVICE_NAME.toUpperCase().replaceAll("-", "");
/**
* Run main to publish plan on Bamboo
*/
public static void main(final String[] args) throws Exception {
//By default credentials are read from the '.credentials' file.
BambooServer bambooServer = new BambooServer("http://localhost:8085");
Plan plan = new PlanSpec().createPlan();
bambooServer.publish(plan);
PlanPermissions planPermission = new PlanSpec().createPlanPermission(plan.getIdentifier());
bambooServer.publish(planPermission);
Plan secPlan = new PlanSpec().createSecBuild();
bambooServer.publish(secPlan);
PlanPermissions secPlanPermission = new PlanSpec().createPlanPermission(secPlan.getIdentifier());
bambooServer.publish(secPlanPermission);
}
private PlanPermissions createPlanPermission(PlanIdentifier planIdentifier) {
Permissions permission = new Permissions().userPermissions("atlbamboo",
PermissionType.EDIT,
PermissionType.VIEW,
PermissionType.ADMIN,
PermissionType.CLONE,
PermissionType.BUILD)
.groupPermissions("development", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE, PermissionType.BUILD)
.groupPermissions("devplant", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE, PermissionType.BUILD)
.loggedInUserPermissions(PermissionType.VIEW)
.anonymousUserPermissionView();
return new PlanPermissions(planIdentifier.getProjectKey(), planIdentifier.getPlanKey()).permissions(permission);
}
private Project project() {
return new Project().name("RED").key(new BambooKey("RED"));
}
public Plan createPlan() {
return new Plan(project(), SERVICE_NAME, new BambooKey(SERVICE_KEY)).description("Plan created from (enter repository url of your plan)")
.variables(new Variable("maven_add_param", ""))
.stages(new Stage("Default Stage").jobs(new Job("Default Job", new BambooKey("JOB1")).tasks(new ScriptTask().description("Clean")
.inlineBody("#!/bin/bash\n" + "set -e\n" + "rm -rf ./*"),
new VcsCheckoutTask().description("Checkout Default Repository").cleanCheckout(true).checkoutItems(new CheckoutItem().defaultRepository()),
new ScriptTask().description("Build").location(Location.FILE).fileFromPath("bamboo-specs/src/main/resources/scripts/build-java.sh").argument(SERVICE_NAME),
createJUnitParserTask().description("Resultparser")
.resultDirectories("**/test-reports/*.xml, **/target/surefire-reports/*.xml, **/target/failsafe-reports/*.xml")
.enabled(true),
new InjectVariablesTask().description("Inject git Tag").path("git.tag").namespace("g").scope(InjectVariablesScope.LOCAL),
new VcsTagTask().description("${bamboo.g.gitTag}").tagName("${bamboo.g.gitTag}").defaultRepository())
.dockerConfiguration(new DockerConfiguration().image("nexus.iqser.com:5001/infra/maven:3.8.4-openjdk-17-slim")
.dockerRunArguments("--net=host")
.volume("/etc/maven/settings.xml", "/usr/share/maven/conf/settings.xml")
.volume("/var/run/docker.sock", "/var/run/docker.sock"))))
.linkedRepositories("RED / " + SERVICE_NAME)
.triggers(new BitbucketServerTrigger())
.planBranchManagement(new PlanBranchManagement().createForVcsBranch()
.delete(new BranchCleanup().whenInactiveInRepositoryAfterDays(14))
.notificationForCommitters());
}
public Plan createSecBuild() {
return new Plan(project(), SERVICE_NAME + "-Sec", new BambooKey(SERVICE_KEY + "SEC")).description("Security Analysis Plan")
.stages(new Stage("Default Stage").jobs(new Job("Default Job", new BambooKey("JOB1")).tasks(new ScriptTask().description("Clean")
.inlineBody("#!/bin/bash\n" + "set -e\n" + "rm -rf ./*"),
new VcsCheckoutTask().description("Checkout Default Repository").cleanCheckout(true).checkoutItems(new CheckoutItem().defaultRepository()),
new ScriptTask().description("Sonar").location(Location.FILE).fileFromPath("bamboo-specs/src/main/resources/scripts/sonar-java.sh").argument(SERVICE_NAME))
.dockerConfiguration(new DockerConfiguration().image("nexus.iqser.com:5001/infra/maven:3.8.4-openjdk-17-slim")
.dockerRunArguments("--net=host")
.volume("/etc/maven/settings.xml", "/usr/share/maven/conf/settings.xml")
.volume("/var/run/docker.sock", "/var/run/docker.sock"))))
.linkedRepositories("RED / " + SERVICE_NAME)
.triggers(new ScheduledTrigger().scheduleOnceDaily(LocalTime.of(23, 00)))
.planBranchManagement(new PlanBranchManagement().createForVcsBranchMatching("release.*").notificationForCommitters());
}
}
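As a worked example of the key derivation above, SERVICE_KEY upper-cases the service name and strips the hyphen, so both plans land under keys derived from SEARCHSERVICE:

// SERVICE_KEY as computed in PlanSpec
String serviceName = "search-service";
String serviceKey = serviceName.toUpperCase().replaceAll("-", ""); // "SEARCHSERVICE"
// plan keys: "SEARCHSERVICE" (build plan) and "SEARCHSERVICESEC" (security plan)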

@@ -0,0 +1,60 @@
#!/bin/bash
set -e
SERVICE_NAME=$1
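# Version strategy, keyed off the branch that triggered the build:
#   master                 -> bump the minor of the latest "<major>.<minor>.*" tag (e.g. 2.0.5 -> 2.1.0)
#   release/<major.minor>.x -> bump the patch of the latest tag on that release line (e.g. 2.0.5 -> 2.0.6)
#   bamboo_version_tag set (and not "dev") -> use it verbatim
#   anything else          -> dev build: install only, tag the commit "<branch>_<buildNumber>" and exit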
if [[ "$bamboo_planRepository_branchName" == "master" ]]
then
branchVersion=$(cat pom.xml | grep -Eo " <version>.*-SNAPSHOT</version>" | sed -s 's|<version>\(.*\)\..*\(-*.*\)</version>|\1|' | tr -d ' ')
latestVersion=$( semver $(git tag -l "${branchVersion}.*" ) | tail -n1 )
newVersion="$(semver $latestVersion -p -i minor)"
elif [[ "$bamboo_planRepository_branchName" == release* ]]
then
branchVersion=$(echo $bamboo_planRepository_branchName | sed -s 's|release\/\([0-9]\+\.[0-9]\+\)\.x|\1|')
latestVersion=$( semver $(git tag -l "${branchVersion}.*" ) | tail -n1 )
newVersion="$(semver $latestVersion -p -i patch)"
elif [[ "${bamboo_version_tag}" != "dev" ]]
then
newVersion="${bamboo_version_tag}"
else
mvn -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
--no-transfer-progress \
${bamboo_maven_add_param} \
clean install \
-Djava.security.egd=file:/dev/./urandom
echo "gitTag=${bamboo_planRepository_1_branch}_${bamboo_buildNumber}" > git.tag
exit 0
fi
echo "gitTag=${newVersion}" > git.tag
mvn --no-transfer-progress \
-f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
${bamboo_maven_add_param} \
versions:set \
-DnewVersion=${newVersion}
mvn --no-transfer-progress \
-f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
${bamboo_maven_add_param} \
versions:set \
-DnewVersion=${newVersion}
mvn -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
--no-transfer-progress \
clean deploy \
${bamboo_maven_add_param} \
-e \
-DdeployAtEnd=true \
-Dmaven.wagon.http.ssl.insecure=true \
-Dmaven.wagon.http.ssl.allowall=true \
-Dmaven.wagon.http.ssl.ignore.validity.dates=true \
-DaltDeploymentRepository=iqser_release::default::https://nexus.iqser.com/repository/red-platform-releases
mvn --no-transfer-progress \
-f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
package
mvn --no-transfer-progress \
-f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
docker:push

@@ -0,0 +1,44 @@
#!/bin/bash
set -e
SERVICE_NAME=$1
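# Builds the service, runs OWASP dependency-check, then a SonarQube scan:
# a branch analysis by default, or a pull-request analysis when
# bamboo_repository_pr_key is set.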
echo "build jar binaries"
mvn -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
--no-transfer-progress \
clean install \
-Djava.security.egd=file:/dev/./urandom
echo "dependency-check:aggregate"
mvn --no-transfer-progress \
-f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
org.owasp:dependency-check-maven:aggregate
if [[ -z "${bamboo_repository_pr_key}" ]]
then
echo "Sonar Scan for branch: ${bamboo_planRepository_1_branch}"
mvn --no-transfer-progress \
-f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
sonar:sonar \
-Dsonar.projectKey=RED_$SERVICE_NAME \
-Dsonar.host.url=https://sonarqube.iqser.com \
-Dsonar.login=${bamboo_sonarqube_api_token_secret} \
-Dsonar.branch.name=${bamboo_planRepository_1_branch} \
-Dsonar.dependencyCheck.jsonReportPath=target/dependency-check-report.json \
-Dsonar.dependencyCheck.xmlReportPath=target/dependency-check-report.xml \
-Dsonar.dependencyCheck.htmlReportPath=target/dependency-check-report.html
else
echo "Sonar Scan for PR with key1: ${bamboo_repository_pr_key}"
mvn --no-transfer-progress \
-f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
sonar:sonar \
-Dsonar.projectKey=RED_$SERVICE_NAME \
-Dsonar.host.url=https://sonarqube.iqser.com \
-Dsonar.login=${bamboo_sonarqube_api_token_secret} \
-Dsonar.pullrequest.key=${bamboo_repository_pr_key} \
-Dsonar.pullrequest.branch=${bamboo_repository_pr_sourceBranch} \
-Dsonar.pullrequest.base=${bamboo_repository_pr_targetBranch} \
-Dsonar.dependencyCheck.jsonReportPath=target/dependency-check-report.json \
-Dsonar.dependencyCheck.xmlReportPath=target/dependency-check-report.xml \
-Dsonar.dependencyCheck.htmlReportPath=target/dependency-check-report.html
fi

@@ -0,0 +1,21 @@
package buildjob;
import org.junit.Test;
import com.atlassian.bamboo.specs.api.builders.plan.Plan;
import com.atlassian.bamboo.specs.api.exceptions.PropertiesValidationException;
import com.atlassian.bamboo.specs.api.util.EntityPropertiesBuilders;
public class PlanSpecTest {
@Test
public void checkYourPlanOffline() throws PropertiesValidationException {
Plan plan = new PlanSpec().createPlan();
EntityPropertiesBuilders.build(plan);
Plan secPlan = new PlanSpec().createSecBuild();
EntityPropertiesBuilders.build(secPlan);
}
}

@@ -1,7 +0,0 @@
plugins {
`kotlin-dsl`
}
repositories {
gradlePluginPortal()
}

@@ -1,60 +0,0 @@
plugins {
`java-library`
`maven-publish`
pmd
checkstyle
jacoco
}
repositories {
mavenLocal()
mavenCentral()
maven {
url = uri("https://nexus.knecon.com/repository/gindev/");
credentials {
username = providers.gradleProperty("mavenUser").getOrNull();
password = providers.gradleProperty("mavenPassword").getOrNull();
}
}
}
group = "com.iqser.red.service"
java.sourceCompatibility = JavaVersion.VERSION_17
java.targetCompatibility = JavaVersion.VERSION_17
pmd {
isConsoleOutput = true
}
tasks.pmdMain {
pmd.ruleSetFiles = files("${rootDir}/config/pmd/pmd.xml")
}
tasks.pmdTest {
pmd.ruleSetFiles = files("${rootDir}/config/pmd/test_pmd.xml")
}
tasks.named<Test>("test") {
useJUnitPlatform()
reports {
junitXml.outputLocation.set(layout.buildDirectory.dir("reports/junit"))
}
}
tasks.test {
finalizedBy(tasks.jacocoTestReport) // report is always generated after tests run
}
tasks.jacocoTestReport {
dependsOn(tasks.test) // tests are required to run before generating the report
reports {
xml.required.set(true)
csv.required.set(false)
html.outputLocation.set(layout.buildDirectory.dir("jacocoHtml"))
}
}
java {
withJavadocJar()
}

@@ -1,39 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
<module name="Checker">
<property
name="severity"
value="error"/>
<module name="TreeWalker">
<module name="SuppressWarningsHolder"/>
<module name="MissingDeprecated"/>
<module name="MissingOverride"/>
<module name="AnnotationLocation"/>
<module name="JavadocStyle"/>
<module name="NonEmptyAtclauseDescription"/>
<module name="IllegalImport"/>
<module name="RedundantImport"/>
<module name="RedundantModifier"/>
<module name="EmptyBlock"/>
<module name="DefaultComesLast"/>
<module name="EmptyStatement"/>
<module name="EqualsHashCode"/>
<module name="ExplicitInitialization"/>
<module name="IllegalInstantiation"/>
<module name="ModifiedControlVariable"/>
<module name="MultipleVariableDeclarations"/>
<module name="PackageDeclaration"/>
<module name="ParameterAssignment"/>
<module name="SimplifyBooleanExpression"/>
<module name="SimplifyBooleanReturn"/>
<module name="StringLiteralEquality"/>
<module name="OneStatementPerLine"/>
<module name="FinalClass"/>
<module name="ArrayTypeStyle"/>
<module name="UpperEll"/>
<module name="OuterTypeFilename"/>
</module>
<module name="FileTabCharacter"/>
<module name="SuppressWarningsFilter"/>
</module>

@@ -1,20 +0,0 @@
<?xml version="1.0"?>
<ruleset name="Custom ruleset"
xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
<description>
Knecon ruleset checks the code for bad stuff
</description>
<rule ref="category/java/errorprone.xml">
<exclude name="MissingSerialVersionUID"/>
<exclude name="AvoidLiteralsInIfCondition"/>
<exclude name="AvoidDuplicateLiterals"/>
<exclude name="NullAssignment"/>
<exclude name="AssignmentInOperand"/>
<exclude name="BeanMembersShouldSerialize"/>
</rule>
</ruleset>

@@ -1,22 +0,0 @@
<?xml version="1.0"?>
<ruleset name="Custom ruleset"
xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
<description>
Knecon test ruleset checks the code for bad stuff
</description>
<rule ref="category/java/errorprone.xml">
<exclude name="MissingSerialVersionUID"/>
<exclude name="AvoidLiteralsInIfCondition"/>
<exclude name="AvoidDuplicateLiterals"/>
<exclude name="NullAssignment"/>
<exclude name="AssignmentInOperand"/>
<exclude name="TestClassWithoutTestCases"/>
<exclude name="BeanMembersShouldSerialize"/>
</rule>
</ruleset>

@@ -1 +0,0 @@
version = 2.0-SNAPSHOT

pom.xml Normal file (21 changes)

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>search-service</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<modules>
<module>bamboo-specs</module>
<module>search-service-v1</module>
<module>search-service-image-v1</module>
</modules>
</project>

@@ -1,15 +0,0 @@
#!/bin/bash
dir=${PWD##*/}
gradle assemble
# Get the current Git branch
branch=$(git rev-parse --abbrev-ref HEAD)
# Get the short commit hash (first 5 characters)
commit_hash=$(git rev-parse --short=5 HEAD)
# Combine branch and commit hash
buildName="${USER}-${branch}-${commit_hash}"
gradle bootBuildImage --cleanCache --publishImage -PbuildbootDockerHostNetwork=true -Pversion=$buildName
echo "nexus.knecon.com:5001/red/${dir}-server-v1:$buildName"

@@ -1,6 +0,0 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": [
"config:base"
]
}

@@ -0,0 +1,98 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>com.iqser.red</groupId>
<artifactId>platform-docker-dependency</artifactId>
<version>1.2.0</version>
<relativePath/>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>search-service-image-v1</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>1.0-SNAPSHOT</version>
<packaging>pom</packaging>
<properties>
<service.server>search-service-server-v1</service.server>
<platform.jar>${service.server}.jar</platform.jar>
<docker.skip.push>false</docker.skip.push>
<docker.image.name>${docker.image.prefix}/${service.server}</docker.image.name>
</properties>
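<!-- Build flow: maven-dependency-plugin (below) copies the repackaged server jar
into ${docker.build.directory}; the fabric8 docker-maven-plugin then builds the
image from the Dockerfile there, passing the jar name in via the PLATFORM_JAR
build arg. -->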
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
</plugin>
</plugins>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<executions>
<execution>
<id>download-platform-jar</id>
<phase>prepare-package</phase>
<goals>
<goal>copy</goal>
</goals>
<configuration>
<artifactItems>
<dependency>
<groupId>${project.groupId}</groupId>
<artifactId>${service.server}</artifactId>
<version>${project.version}</version>
<type>jar</type>
<overWrite>true</overWrite>
<destFileName>${platform.jar}</destFileName>
</dependency>
</artifactItems>
<outputDirectory>${docker.build.directory}</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>io.fabric8</groupId>
<artifactId>docker-maven-plugin</artifactId>
<configuration>
<images>
<image>
<name>${docker.image.name}</name>
<build>
<dockerFileDir>${docker.build.directory}</dockerFileDir>
<args>
<PLATFORM_JAR>${platform.jar}</PLATFORM_JAR>
</args>
<tags>
<tag>${docker.image.version}</tag>
<tag>latest</tag>
</tags>
</build>
</image>
</images>
</configuration>
</plugin>
</plugins>
</pluginManagement>
</build>
</project>

@@ -0,0 +1,7 @@
FROM red/base-image:2.0.2
ARG PLATFORM_JAR
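# ARG only exists at build time; re-exporting it as ENV keeps the jar name
# available in the running container (the base image's entrypoint is assumed
# to use it to launch the jar).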
ENV PLATFORM_JAR ${PLATFORM_JAR}
COPY ["${PLATFORM_JAR}", "/"]

search-service-v1/pom.xml Normal file (99 changes)

@@ -0,0 +1,99 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>platform-dependency</artifactId>
<groupId>com.iqser.red</groupId>
<version>1.17.0</version>
<relativePath/>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>search-service-v1</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>2.0-SNAPSHOT</version>
<packaging>pom</packaging>
<modules>
<module>search-service-api-v1</module>
<module>search-service-server-v1</module>
</modules>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.iqser.red</groupId>
<artifactId>platform-commons-dependency</artifactId>
<version>1.22.0</version>
<scope>import</scope>
<type>pom</type>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<pluginManagement>
<plugins>
<plugin>
<groupId>org.sonarsource.scanner.maven</groupId>
<artifactId>sonar-maven-plugin</artifactId>
<version>3.9.0.2155</version>
</plugin>
<plugin>
<groupId>org.owasp</groupId>
<artifactId>dependency-check-maven</artifactId>
<version>6.3.1</version>
<configuration>
<format>ALL</format>
</configuration>
</plugin>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<executions>
<execution>
<id>prepare-agent</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.8</version>
<executions>
<execution>
<id>prepare-agent</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>report</id>
<goals>
<goal>report-aggregate</goal>
</goals>
<phase>verify</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

@@ -1,28 +0,0 @@
plugins {
id("com.iqser.red.service.java-conventions")
id("io.freefair.lombok") version "8.4"
}
description = "search-service-api-v1"
dependencies {
implementation("org.springframework:spring-web:6.0.6")
testImplementation("org.springframework.boot:spring-boot-starter-test:3.1.5")
}
publishing {
publications {
create<MavenPublication>(name) {
from(components["java"])
}
}
repositories {
maven {
url = uri("https://nexus.knecon.com/repository/red-platform-releases/")
credentials {
username = providers.gradleProperty("mavenUser").getOrNull();
password = providers.gradleProperty("mavenPassword").getOrNull();
}
}
}
}

@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>search-service-v1</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>2.0-SNAPSHOT</version>
</parent>
<artifactId>search-service-api-v1</artifactId>
<dependencies>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
<optional>true</optional>
</dependency>
</dependencies>
</project>

@@ -14,7 +14,7 @@ import lombok.NoArgsConstructor;
@AllArgsConstructor
public class MatchedSection {
- private String sectionNumber;
+ private int sectionNumber;
private String headline;
@Builder.Default

@@ -3,7 +3,6 @@ package com.iqser.red.service.search.v1.resources;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.GetMapping;
- import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;
@@ -18,7 +17,7 @@ public interface SearchResource {
@ResponseBody
@ResponseStatus(value = HttpStatus.OK)
- @PostMapping(value = SEARCH_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
+ @GetMapping(value = SEARCH_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
SearchResult getDossierStatus(@RequestBody SearchRequest searchRequest);
}

@@ -1,73 +0,0 @@
import org.springframework.boot.gradle.tasks.bundling.BootBuildImage
plugins {
application
id("com.iqser.red.service.java-conventions")
id("org.springframework.boot") version "3.1.5"
id("io.spring.dependency-management") version "1.1.3"
id("org.sonarqube") version "4.4.1.3373"
id("io.freefair.lombok") version "8.4"
}
description = "search-service-server-v1"
configurations {
all {
exclude(group = "commons-logging", module = "commons-logging")
exclude(group = "org.springframework.boot", module = "spring-boot-starter-log4j2")
exclude(group = "com.iqser.red.commons", module = "logging-commons")
}
}
val springBootStarterVersion = "3.1.5"
dependencies {
api("com.knecon.fforesight:tenant-commons:0.30.0")
api("com.knecon.fforesight:tracing-commons:0.5.0")
api("com.knecon.fforesight:lifecycle-commons:0.6.0")
api("com.google.guava:guava:31.1-jre")
api("com.iqser.red.commons:storage-commons:2.45.0")
api(project(":search-service-api-v1"))
api("com.iqser.red.service:persistence-service-internal-api-v1:2.576.0-RED10106.0")
api("com.iqser.red.commons:spring-commons:2.1.0")
api("com.iqser.red.commons:metric-commons:2.1.0")
api("com.iqser.red.commons:jackson-commons:2.1.0")
api("co.elastic.clients:elasticsearch-java:8.6.2")
api("org.opensearch.client:opensearch-rest-client:2.6.0")
api("org.opensearch.client:opensearch-java:2.3.0")
api("jakarta.json:jakarta.json-api:2.1.1")
api("org.springframework.cloud:spring-cloud-starter-openfeign:4.0.4")
api("org.springframework.boot:spring-boot-starter-aop:${springBootStarterVersion}")
api("org.springframework.boot:spring-boot-starter-amqp:${springBootStarterVersion}")
api("net.logstash.logback:logstash-logback-encoder:7.4")
api("ch.qos.logback:logback-classic")
testImplementation("org.springframework.boot:spring-boot-starter-test:${springBootStarterVersion}")
testImplementation("com.iqser.red.commons:test-commons:2.1.0")
testImplementation("org.testcontainers:elasticsearch:1.17.6")
testImplementation("org.opensearch:opensearch-testcontainers:2.0.0")
testImplementation("org.springframework.amqp:spring-rabbit-test:3.0.2")
}
tasks.named<BootBuildImage>("bootBuildImage") {
environment.put("BPE_DELIM_JAVA_TOOL_OPTIONS", " ")
environment.put("BPE_APPEND_JAVA_TOOL_OPTIONS", "-Dfile.encoding=UTF-8")
imageName.set("nexus.knecon.com:5001/red/${project.name}:${project.version}")
if (project.hasProperty("buildbootDockerHostNetwork")) {
network.set("host")
}
docker {
if (project.hasProperty("buildbootDockerHostNetwork")) {
bindHostToBuilder.set(true)
}
verboseLogging.set(true)
publishRegistry {
username.set(providers.gradleProperty("mavenUser").getOrNull())
password.set(providers.gradleProperty("mavenPassword").getOrNull())
email.set(providers.gradleProperty("mavenEmail").getOrNull())
url.set("https://nexus.knecon.com:5001/")
}
}
}

@@ -0,0 +1,198 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<artifactId>search-service-v1</artifactId>
<groupId>com.iqser.red.service</groupId>
<version>2.0-SNAPSHOT</version>
</parent>
<artifactId>search-service-server-v1</artifactId>
<properties>
<persistence-service.version>2.0.12</persistence-service.version>
</properties>
<dependencies>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>31.1-jre</version>
</dependency>
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>storage-commons</artifactId>
</dependency>
<dependency>
<groupId>com.iqser.red.service</groupId>
<artifactId>search-service-api-v1</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.iqser.red.service</groupId>
<artifactId>persistence-service-internal-api-v1</artifactId>
<version>${persistence-service.version}</version>
</dependency>
<!-- commons -->
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>spring-commons</artifactId>
</dependency>
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>logging-commons</artifactId>
</dependency>
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>metric-commons</artifactId>
</dependency>
<!-- other external -->
<dependency>
<!-- this dependency is necessary to work with java8 zoned timestamps -->
<groupId>com.iqser.red.commons</groupId>
<artifactId>jackson-commons</artifactId>
</dependency>
<dependency>
<groupId>co.elastic.clients</groupId>
<artifactId>elasticsearch-java</artifactId>
<version>8.6.2</version>
</dependency>
<dependency>
<groupId>org.opensearch.client</groupId>
<artifactId>opensearch-rest-client</artifactId>
<version>2.6.0</version>
</dependency>
<dependency>
<groupId>org.opensearch.client</groupId>
<artifactId>opensearch-java</artifactId>
<version>2.0.0</version>
</dependency>
<dependency>
<groupId>jakarta.json</groupId>
<artifactId>jakarta.json-api</artifactId>
<version>2.0.1</version>
</dependency>
<!-- spring -->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-openfeign</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-aop</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-amqp</artifactId>
<version>2.3.1.RELEASE</version>
</dependency>
<!-- test dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>com.iqser.red.commons</groupId>
<artifactId>test-commons</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>elasticsearch</artifactId>
<version>1.16.3</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.opensearch</groupId>
<artifactId>opensearch-testcontainers</artifactId>
<version>2.0.0</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.springframework.amqp</groupId>
<artifactId>spring-rabbit-test</artifactId>
<version>2.3.1</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<annotationProcessors>
<annotationProcessor>lombok.launch.AnnotationProcessorHider$AnnotationProcessor</annotationProcessor>
<annotationProcessor>com.dslplatform.json.processor.CompiledJsonAnnotationProcessor</annotationProcessor>
</annotationProcessors>
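<!-- Listing processors explicitly disables javac's classpath-based discovery,
so Lombok and the DSL-JSON processor both have to be named here. -->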
</configuration>
</plugin>
<plugin>
<!-- generate git.properties for exposure in /info -->
<groupId>pl.project13.maven</groupId>
<artifactId>git-commit-id-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>revision</goal>
</goals>
<configuration>
<generateGitPropertiesFile>true</generateGitPropertiesFile>
<gitDescribe>
<tags>true</tags>
</gitDescribe>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<id>original-jar</id>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<classifier>original</classifier>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<!-- repackages the generated jar into a runnable fat-jar and makes it
executable -->
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
<configuration>
<executable>true</executable>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

@@ -2,31 +2,27 @@ package com.iqser.red.service.search.v1.server;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.annotation.Import;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.multitenancy.AsyncConfig;
import com.iqser.red.service.search.v1.server.multitenancy.MultiTenancyMessagingConfiguration;
import com.iqser.red.service.search.v1.server.multitenancy.MultiTenancyWebConfiguration;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
import com.iqser.red.storage.commons.StorageAutoConfiguration;
import com.knecon.fforesight.lifecyclecommons.LifecycleAutoconfiguration;
import com.knecon.fforesight.tenantcommons.MultiTenancyAutoConfiguration;
import io.micrometer.core.aop.TimedAspect;
import io.micrometer.core.instrument.MeterRegistry;
@ImportAutoConfiguration({MultiTenancyAutoConfiguration.class, LifecycleAutoconfiguration.class})
@Import({StorageAutoConfiguration.class})
@Import({MultiTenancyWebConfiguration.class, AsyncConfig.class, MultiTenancyMessagingConfiguration.class})
@EnableFeignClients(basePackageClasses = FileStatusClient.class)
@EnableConfigurationProperties({ElasticsearchSettings.class, SearchServiceSettings.class})
@SpringBootApplication(exclude = {SecurityAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class})
@EnableAspectJAutoProxy
public class Application {
public static void main(String[] args) {

@@ -0,0 +1,10 @@
package com.iqser.red.service.search.v1.server.client;
import org.springframework.cloud.openfeign.FeignClient;
import com.iqser.red.service.persistence.service.v1.api.internal.resources.TenantsResource;
@FeignClient(name = "TenantsResource", url = "${persistence-service.url}")
public interface TenantsClient extends TenantsResource {
}

@@ -1,119 +0,0 @@
package com.iqser.red.service.search.v1.server.configuration;
import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.BindingBuilder;
import org.springframework.amqp.core.DirectExchange;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.QueueBuilder;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import lombok.RequiredArgsConstructor;
@Configuration
@RequiredArgsConstructor
public class MessagingConfiguration {
public static final String INDEXING_REQUEST_QUEUE_PREFIX = "indexing_request";
public static final String INDEXING_REQUEST_EXCHANGE = "indexing_request_exchange";
public static final String INDEXING_DLQ = "indexing_error";
public static final String DELETE_FROM_INDEX_REQUEST_QUEUE_PREFIX = "delete_from_index_request";
public static final String DELETE_FROM_INDEX_REQUEST_EXCHANGE = "delete_from_index_request_exchange";
public static final String DELETE_FROM_INDEX_DLQ = "delete_from_index_error";
public static final String X_ERROR_INFO_HEADER = "x-error-message";
public static final String X_ERROR_INFO_TIMESTAMP_HEADER = "x-error-message-timestamp";
@Value("${fforesight.multitenancy.tenant-delete-queue:search-service-tenant-delete}")
private String tenantDeleteEventQueueName;
@Value("${fforesight.multitenancy.tenant-delete-dlq:search-service-tenant-delete-error}")
private String tenantDeleteDLQName;
@Value("${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
private String tenantUpdatedEventQueueName;
@Value("${fforesight.multitenancy.tenant-updated-dlq:search-service-tenant-updated-error}")
private String tenantUpdatedDLQName;
@Bean
public DirectExchange indexingRequestExchange() {
return new DirectExchange(INDEXING_REQUEST_EXCHANGE);
}
@Bean
public Queue indexingDLQ() {
return QueueBuilder.durable(INDEXING_DLQ).build();
}
@Bean
public DirectExchange deleteFromIndexRequestExchange() {
return new DirectExchange(DELETE_FROM_INDEX_REQUEST_EXCHANGE);
}
@Bean
public Queue deleteFromIndexDLQ() {
return QueueBuilder.durable(DELETE_FROM_INDEX_DLQ).build();
}
@Bean
public Binding tenantExchangeDeleteBinding(@Qualifier("tenantUserManagementTenantDeleteQueue") Queue tenantUserManagementTenantDeleteQueue,
@Qualifier("tenantExchange") TopicExchange tenantExchange) {
return BindingBuilder.bind(tenantUserManagementTenantDeleteQueue).to(tenantExchange).with("tenant.delete");
}
@Bean("tenantUserManagementTenantDeleteQueue")
public Queue tenantDeleteQueue() {
return QueueBuilder.durable(this.tenantDeleteEventQueueName)
.withArgument("x-dead-letter-exchange", "")
.withArgument("x-dead-letter-routing-key", this.tenantDeleteDLQName)
.build();
}
@Bean
public Queue tenantDeleteDLQ() {
return QueueBuilder.durable(this.tenantDeleteDLQName).build();
}
@Bean
public Binding tenantExchangeUpdatedBinding(@Qualifier("tenantUserManagementTenantUpdatedQueue") Queue tenantUserManagementTenantUpdatedQueue,
@Qualifier("tenantExchange") TopicExchange tenantExchange) {
return BindingBuilder.bind(tenantUserManagementTenantUpdatedQueue).to(tenantExchange).with("tenant.updated");
}
@Bean("tenantUserManagementTenantUpdatedQueue")
public Queue tenantUpdatedQueue() {
return QueueBuilder.durable(this.tenantUpdatedEventQueueName)
.withArgument("x-dead-letter-exchange", "")
.withArgument("x-dead-letter-routing-key", this.tenantUpdatedDLQName)
.build();
}
@Bean
public Queue tenantUpdatedDLQ() {
return QueueBuilder.durable(this.tenantUpdatedDLQName).build();
}
}

@@ -1,11 +0,0 @@
package com.iqser.red.service.search.v1.server.configuration;
import org.springframework.context.annotation.Configuration;
import com.knecon.fforesight.tenantcommons.queue.TenantMessagingConfiguration;
@Configuration
public class TenantMessagingConfigurationImpl extends TenantMessagingConfiguration {
}

@@ -1,36 +0,0 @@
package com.iqser.red.service.search.v1.server.controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import com.iqser.red.service.search.v1.model.SearchRequest;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.resources.SearchResource;
import com.iqser.red.service.search.v1.server.service.SearchService;
import lombok.RequiredArgsConstructor;
@RestController
@RequiredArgsConstructor
public class SearchController implements SearchResource {
private final SearchService searchService;
public SearchResult getDossierStatus(@RequestBody SearchRequest searchRequest) {
return searchService.search(searchRequest.getQueryString(),
searchRequest.getDossierTemplateIds(),
searchRequest.getDossierIds(),
searchRequest.getFileId(),
searchRequest.getAssignee(),
searchRequest.isIncludeDeletedDossiers(),
searchRequest.isIncludeArchivedDossiers(),
searchRequest.getWorkflowStatus(),
searchRequest.getFileAttributes(),
searchRequest.getPage(),
searchRequest.getPageSize(),
searchRequest.isReturnSections());
}
}

@@ -1,49 +0,0 @@
package com.iqser.red.service.search.v1.server.migration;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationContext;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@SuppressWarnings("PMD")
public class MigrationStarterService {
private final ApplicationContext ctx;
private final IndexInformationService indexInformationService;
private final IndexingMessageReceiver indexingMessageReceiver;
private final SearchServiceSettings settings;
private final TenantsClient tenantsClient;
@EventListener(ApplicationReadyEvent.class)
public void migrate() {
// This can only run in the post-upgrade hook, because otherwise the old service is still running.
if (settings.isMigrateOnly()) {
tenantsClient.getTenants().forEach(tenant -> {
TenantContext.setTenantId(tenant.getTenantId());
if (indexInformationService.hasIndexChanged()) {
log.info("Index has changed and will be closed, dropped, recreated and all files will be indexed");
indexingMessageReceiver.receiveIndexingRequest(IndexMessage.builder().messageType(IndexMessageType.DROP).build());
}
});
System.exit(SpringApplication.exit(ctx, () -> 0));
}
}
}

@@ -1,7 +1,6 @@
package com.iqser.red.service.search.v1.server.model;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;
import lombok.AllArgsConstructor;
import lombok.Builder;

@@ -0,0 +1,16 @@
package com.iqser.red.service.search.v1.server.model;
import java.util.Map;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class Embeddings {
private Map<String, Float[]> embeddings;
}
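A hedged sketch of the payload this model carries; that the map keys are section numbers is an assumption here, and the three-element vectors stand in for the 383-dimensional ones stored in IndexDocument below:

import java.util.Map;

public class EmbeddingsSketch {

    public static void main(String[] args) {
        // Hypothetical payload: one embedding vector per section number.
        Embeddings embeddings = new Embeddings(Map.of(
                "1", new Float[] {0.12f, -0.08f, 0.33f},
                "2", new Float[] {0.05f, 0.41f, -0.17f}));
        System.out.println(embeddings.getEmbeddings().size()); // 2
    }
}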

@@ -4,6 +4,7 @@ import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
+ import java.util.Set;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
@@ -18,21 +19,18 @@
@SuppressWarnings("serial")
public class IndexDocument implements Serializable {
private String dossierTemplateId;
private String dossierId;
private String fileId;
private String filename;
private int sectionNumber;
private Date date;
private String assignee;
private boolean dossierDeleted;
private boolean dossierArchived;
private String workflowStatus;
@Builder.Default
private List<IndexSection> sections = new ArrayList<>();
private String text;
private Set<Integer> pages;
private String headline;
@Builder.Default
private List<IndexFileAttribute> fileAttributes = new ArrayList<>();
private Float[] wordEmbeddingsVector = new Float[383];
}
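The commit message mentions dense vector similarity; a minimal sketch of cosine similarity over vectors shaped like wordEmbeddingsVector above (where the real scoring happens, presumably inside Elasticsearch/OpenSearch, is not shown in this diff):

public final class VectorMath {

    // Cosine similarity between two embedding vectors; 1.0 means identical direction.
    static double cosineSimilarity(Float[] a, Float[] b) {
        double dot = 0.0, normA = 0.0, normB = 0.0;
        for (int i = 0; i < a.length; i++) {
            dot += a[i] * b[i];
            normA += a[i] * a[i];
            normB += b[i] * b[i];
        }
        return dot / (Math.sqrt(normA) * Math.sqrt(normB));
    }
}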

@@ -16,7 +16,7 @@ import lombok.NoArgsConstructor;
@SuppressWarnings("serial")
public class IndexSection implements Serializable {
- private String sectionNumber;
+ private int sectionNumber;
private String text;
private Set<Integer> pages;
private String headline;

@@ -14,7 +14,7 @@ import lombok.NoArgsConstructor;
@AllArgsConstructor
public class SectionText {
- private String sectionNumber;
+ private int sectionNumber;
private String headline;
private String text;

@@ -3,11 +3,14 @@ package com.iqser.red.service.search.v1.server.model;
import java.util.ArrayList;
import java.util.List;
import com.dslplatform.json.CompiledJson;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@CompiledJson
@NoArgsConstructor
@AllArgsConstructor
public class Text {

@@ -0,0 +1,27 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import java.util.concurrent.Executor;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.AsyncConfigurerSupport;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
@Configuration
public class AsyncConfig extends AsyncConfigurerSupport {
@Override
public Executor getAsyncExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setCorePoolSize(7);
executor.setMaxPoolSize(42);
executor.setQueueCapacity(11);
executor.setThreadNamePrefix("TenantAwareTaskExecutor-");
executor.setTaskDecorator(new TenantAwareTaskDecorator());
executor.initialize();
return executor;
}
}
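A minimal sketch of what this executor enables, assuming @EnableAsync is configured elsewhere; ReindexWorker is a hypothetical bean, not part of this commit:

package com.iqser.red.service.search.v1.server.multitenancy;

import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

@Service
public class ReindexWorker {

    @Async
    public void reindexCurrentTenant() {
        // Runs on a "TenantAwareTaskExecutor-" thread; TenantAwareTaskDecorator (below)
        // has already copied the submitting thread's tenant into this one.
        String tenantId = TenantContext.getTenantId();
        // ... tenant-scoped work with tenantId ...
    }
}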

@@ -0,0 +1,105 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.security.SecureRandom;
import java.security.spec.KeySpec;
import java.util.Base64;
import javax.annotation.PostConstruct;
import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.GCMParameterSpec;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import lombok.SneakyThrows;
@Service
public class EncryptionDecryptionService {
@Value("${search-service.crypto.key:redaction}")
private String key;
private SecretKey secretKey;
private byte[] iv;
@SneakyThrows
@PostConstruct
protected void postConstruct() {
SecureRandom secureRandom = new SecureRandom();
iv = new byte[12];
secureRandom.nextBytes(iv);
secretKey = generateSecretKey(key, iv);
}
@SneakyThrows
public String encrypt(String strToEncrypt) {
return Base64.getEncoder().encodeToString(encrypt(strToEncrypt.getBytes()));
}
@SneakyThrows
public String decrypt(String strToDecrypt) {
byte[] bytes = Base64.getDecoder().decode(strToDecrypt);
return new String(decrypt(bytes), StandardCharsets.UTF_8);
}
@SneakyThrows
public byte[] encrypt(byte[] data) {
Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
GCMParameterSpec parameterSpec = new GCMParameterSpec(128, iv);
cipher.init(Cipher.ENCRYPT_MODE, secretKey, parameterSpec);
byte[] encryptedData = cipher.doFinal(data);
ByteBuffer byteBuffer = ByteBuffer.allocate(4 + iv.length + encryptedData.length);
byteBuffer.putInt(iv.length);
byteBuffer.put(iv);
byteBuffer.put(encryptedData);
return byteBuffer.array();
}
@SneakyThrows
public byte[] decrypt(byte[] encryptedData) {
ByteBuffer byteBuffer = ByteBuffer.wrap(encryptedData);
int nonceSize = byteBuffer.getInt();
if (nonceSize < 12 || nonceSize >= 16) {
throw new IllegalArgumentException("Nonce size is incorrect. Make sure that the incoming data is an AES encrypted file.");
}
byte[] iv = new byte[nonceSize];
byteBuffer.get(iv);
SecretKey secretKey = generateSecretKey(key, iv);
byte[] cipherBytes = new byte[byteBuffer.remaining()];
byteBuffer.get(cipherBytes);
Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
GCMParameterSpec parameterSpec = new GCMParameterSpec(128, iv);
cipher.init(Cipher.DECRYPT_MODE, secretKey, parameterSpec);
return cipher.doFinal(cipherBytes);
}
@SneakyThrows
public SecretKey generateSecretKey(String password, byte[] iv) {
KeySpec spec = new PBEKeySpec(password.toCharArray(), iv, 65536, 128); // AES-128
SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
byte[] key = secretKeyFactory.generateSecret(spec).getEncoded();
return new SecretKeySpec(key, "AES");
}
}
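A round-trip sketch, run outside Spring for illustration: the key is set reflectively in place of the @Value injection, and the @PostConstruct initializer is called by hand (it is protected, so same-package access suffices):

package com.iqser.red.service.search.v1.server.multitenancy;

import java.lang.reflect.Field;

public class EncryptionRoundTrip {

    public static void main(String[] args) throws Exception {
        EncryptionDecryptionService svc = new EncryptionDecryptionService();
        Field key = EncryptionDecryptionService.class.getDeclaredField("key");
        key.setAccessible(true);
        key.set(svc, "redaction"); // the default of ${search-service.crypto.key:redaction}
        svc.postConstruct();       // generates the IV and derives the AES key
        String cipherText = svc.encrypt("tenant-secret");
        System.out.println(svc.decrypt(cipherText)); // prints "tenant-secret"
    }
}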

@@ -0,0 +1,18 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import org.springframework.stereotype.Component;
import feign.RequestInterceptor;
import feign.RequestTemplate;
@Component
public class ForwardTenantInterceptor implements RequestInterceptor {
public static final String TENANT_HEADER_NAME = "X-TENANT-ID";
@Override
public void apply(RequestTemplate template) {
// forward the current tenant id on every outgoing Feign request
template.header(TENANT_HEADER_NAME, TenantContext.getTenantId());
}
}

@@ -0,0 +1,49 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import static com.iqser.red.service.search.v1.server.multitenancy.TenantInterceptor.TENANT_HEADER_NAME;
import org.springframework.amqp.rabbit.config.AbstractRabbitListenerContainerFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class MultiTenancyMessagingConfiguration {
@Bean
public static BeanPostProcessor multitenancyBeanPostProcessor() {
return new BeanPostProcessor() {
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
if (bean instanceof RabbitTemplate) {
((RabbitTemplate) bean).setBeforePublishPostProcessors(m -> {
m.getMessageProperties().setHeader(TENANT_HEADER_NAME, TenantContext.getTenantId());
return m;
});
} else if (bean instanceof AbstractRabbitListenerContainerFactory) {
((AbstractRabbitListenerContainerFactory<?>) bean).setAfterReceivePostProcessors(m -> {
String tenant = m.getMessageProperties().getHeader(TENANT_HEADER_NAME);
if (tenant != null) {
TenantContext.setTenantId(tenant);
} else {
throw new RuntimeException("No tenant is set on the queue message");
}
return m;
});
}
return bean;
}
};
}
}

@@ -0,0 +1,28 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import com.iqser.red.commons.spring.DefaultWebMvcConfiguration;
@Configuration
public class MultiTenancyWebConfiguration extends DefaultWebMvcConfiguration {
private final TenantInterceptor tenantInterceptor;
@Autowired
public MultiTenancyWebConfiguration(TenantInterceptor tenantInterceptor) {
this.tenantInterceptor = tenantInterceptor;
}
@Override
public void addInterceptors(InterceptorRegistry registry) {
registry.addWebRequestInterceptor(tenantInterceptor);
}
}

@@ -0,0 +1,45 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.client.TenantsClient;
import com.iqser.red.storage.commons.model.AzureStorageConnection;
import com.iqser.red.storage.commons.model.S3StorageConnection;
import com.iqser.red.storage.commons.service.StorageConnectionProvider;
import lombok.RequiredArgsConstructor;
@Service
@RequiredArgsConstructor
public class StorageConnectionProviderImpl implements StorageConnectionProvider {
private final TenantsClient tenantsClient;
private final EncryptionDecryptionService encryptionDecryptionService;
@Override
public AzureStorageConnection getAzureStorageConnection(String tenantId) {
var tenant = tenantsClient.getTenant(tenantId);
return AzureStorageConnection.builder()
.connectionString(encryptionDecryptionService.decrypt(tenant.getAzureStorageConnection().getConnectionString()))
.containerName(tenant.getAzureStorageConnection().getContainerName())
.build();
}
@Override
public S3StorageConnection getS3StorageConnection(String tenantId) {
var tenant = tenantsClient.getTenant(tenantId);
return S3StorageConnection.builder()
.key(tenant.getS3StorageConnection().getKey())
.secret(encryptionDecryptionService.decrypt(tenant.getS3StorageConnection().getSecret()))
.signerType(tenant.getS3StorageConnection().getSignerType())
.bucketName(tenant.getS3StorageConnection().getBucketName())
.region(tenant.getS3StorageConnection().getRegion())
.endpoint(tenant.getS3StorageConnection().getEndpoint())
.build();
}
}

@@ -0,0 +1,23 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import org.springframework.core.task.TaskDecorator;
import org.springframework.lang.NonNull;
public class TenantAwareTaskDecorator implements TaskDecorator {
@Override
@NonNull
public Runnable decorate(@NonNull Runnable runnable) {
String tenantId = TenantContext.getTenantId();
return () -> {
try {
TenantContext.setTenantId(tenantId);
runnable.run();
} finally {
TenantContext.setTenantId(null);
}
};
}
}

@@ -0,0 +1,29 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public final class TenantContext {
private static InheritableThreadLocal<String> currentTenant = new InheritableThreadLocal<>();
public static void setTenantId(String tenantId) {
log.debug("Setting tenantId to " + tenantId);
currentTenant.set(tenantId);
}
public static String getTenantId() {
return currentTenant.get();
}
public static void clear() {
currentTenant.remove();
}
}
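Because the holder is an InheritableThreadLocal, a thread created after the tenant is set inherits the value; a small sketch:

import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;

public class TenantContextDemo {

    public static void main(String[] args) throws InterruptedException {
        TenantContext.setTenantId("tenant-a");
        // The child thread copies the value at construction time.
        Thread worker = new Thread(() -> System.out.println(TenantContext.getTenantId()));
        worker.start();   // prints "tenant-a"
        worker.join();
        TenantContext.clear(); // always clear on the owning thread when done
    }
}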

@@ -0,0 +1,35 @@
package com.iqser.red.service.search.v1.server.multitenancy;
import org.springframework.stereotype.Component;
import org.springframework.ui.ModelMap;
import org.springframework.web.context.request.WebRequest;
import org.springframework.web.context.request.WebRequestInterceptor;
@Component
public class TenantInterceptor implements WebRequestInterceptor {
public static final String TENANT_HEADER_NAME = "X-TENANT-ID";
@Override
public void preHandle(WebRequest request) {
if (request.getHeader(TENANT_HEADER_NAME) != null) {
TenantContext.setTenantId(request.getHeader(TENANT_HEADER_NAME));
}
}
@Override
public void postHandle(WebRequest request, ModelMap model) {
TenantContext.clear();
}
@Override
public void afterCompletion(WebRequest request, Exception ex) {
}
}

@@ -1,38 +0,0 @@
package com.iqser.red.service.search.v1.server.queue;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class DeleteTenantMessageReceiver {
private final IndexDeleteService indexDeleteService;
@Value("${fforesight.multitenancy.tenant-delete-queue:tenant-delete-queue}")
private String tenantDeleteQueue;
@PostConstruct
public void postConstruct() {
log.info("Listener for tenant-delete started for queue: {}", this.tenantDeleteQueue);
}
@RabbitListener(queues = "${fforesight.multitenancy.tenant-delete-queue:tenant-delete-queue}")
public void deleteTenant(TenantResponse tenant) {
indexDeleteService.dropIndex(tenant.getSearchConnection());
}
}

@@ -1,236 +1,201 @@
package com.iqser.red.service.search.v1.server.queue;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_DLQ;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_REQUEST_EXCHANGE;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.X_ERROR_INFO_HEADER;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.X_ERROR_INFO_TIMESTAMP_HEADER;
import java.io.IOException;
import java.time.OffsetDateTime;
import java.time.temporal.ChronoUnit;
import java.util.List;
import org.springframework.amqp.AmqpRejectAndDontRequeueException;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileErrorInfo;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.server.client.DossierClient;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
import com.iqser.red.service.search.v1.server.service.TextStorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class IndexingMessageReceiver {
public static final String INDEXING_LISTENER_ID = "indexing-listener";
public static final String DELETE_FROM_INDEX_LISTENER_ID = "delete-from-index-listener";
private final ObjectMapper objectMapper;
private final TextStorageService textStorageService;
private final FileStatusClient fileStatusClient;
private final DossierClient dossierClient;
private final FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
private final RabbitTemplate rabbitTemplate;
private final DocumentDeleteService documentDeleteService;
private final DocumentUpdateService documentUpdateService;
private final DocumentIndexService documentIndexService;
private final IndexDeleteService indexDeleteService;
private final IndexInformationService indexInformationService;
private final IndexDocumentConverterService indexDocumentConverterService;
@SneakyThrows
@RabbitHandler
@RabbitListener(id = INDEXING_LISTENER_ID)
public void receiveIndexingRequest(Message message) {
var indexRequest = objectMapper.readValue(message.getBody(), IndexMessage.class);
// This prevents endless retries and OOM errors on redelivery.
if (message.getMessageProperties().isRedelivered()) {
throw new AmqpRejectAndDontRequeueException(String.format("Error during last processing of request with dossierId: %s and fileId: %s, do not retry.",
indexRequest.getDossierId(),
indexRequest.getFileId()));
}
try {
receiveIndexingRequest(indexRequest);
} catch (Exception e) {
log.warn("An exception occurred in processing the indexing request stage: ", e);
message.getMessageProperties().getHeaders().put(X_ERROR_INFO_HEADER, e.getMessage());
message.getMessageProperties().getHeaders().put(X_ERROR_INFO_TIMESTAMP_HEADER, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
throw e;
}
}
public void receiveIndexingRequest(IndexMessage indexRequest) {
log.info("Processing indexing request: {}", indexRequest);
FileModel fileStatus;
Dossier dossier;
switch (indexRequest.getMessageType()) {
case INSERT:
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
indexFile(dossier, fileStatus);
break;
case UPDATE:
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
if (documentUpdateService.documentExists(indexRequest.getFileId())) {
var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
dossier.getSoftDeletedTime() != null,
dossier.getArchivedTime() != null,
fileStatus.getWorkflowStatus().name(),
fileStatus.getFileAttributes());
documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
log.info("Successfully updated {}", indexRequest);
} else {
indexFile(dossier, fileStatus);
}
break;
case DROP:
indexDeleteService.recreateIndex();
addAllDocumentsToIndexQueue();
indexInformationService.updateIndexInformation();
break;
default:
throw new IllegalArgumentException("MessageType '" + indexRequest.getMessageType() + "' does not exist");
}
}
@RabbitHandler
@RabbitListener(queues = INDEXING_DLQ)
public void receiveIndexingRequestDQL(Message in) throws IOException {
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
String errorLog = "Failed to process indexing request:";
log.info(errorLog + ": {}", indexRequest);
String errorMessage = errorLog + in.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
OffsetDateTime timestamp = in.getMessageProperties().getHeader(X_ERROR_INFO_TIMESTAMP_HEADER);
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(),
indexRequest.getFileId(),
new FileErrorInfo(errorMessage, INDEXING_DLQ, "search-service", timestamp));
}
@RabbitHandler
@RabbitListener(id = DELETE_FROM_INDEX_LISTENER_ID)
public void receiveDeleteDocumentRequest(Message in) throws IOException {
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
log.info("Processing delete document request: {}", indexRequest);
try {
documentDeleteService.deleteDocument(indexRequest.getFileId());
log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
} catch (Exception e) {
log.warn("An exception occurred in processing delete document stage: {}", e.getMessage());
in.getMessageProperties().getHeaders().put(X_ERROR_INFO_HEADER, e.getMessage());
in.getMessageProperties().getHeaders().put(X_ERROR_INFO_TIMESTAMP_HEADER, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
throw e;
}
}
@RabbitHandler
@RabbitListener(queues = DELETE_FROM_INDEX_DLQ)
public void receiveDeleteDocumentRequestDLQ(Message in) throws IOException {
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
String errorLog = "Failed to process delete from index request ";
log.info(errorLog + ": {}", indexRequest);
String errorMessage = errorLog + in.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
OffsetDateTime timestamp = in.getMessageProperties().getHeader(X_ERROR_INFO_TIMESTAMP_HEADER);
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(),
indexRequest.getFileId(),
new FileErrorInfo(errorMessage, DELETE_FROM_INDEX_DLQ, "search-service", timestamp));
}
private void indexFile(Dossier dossier, FileModel file) {
fileStatusProcessingUpdateClient.indexing(dossier.getId(), file.getId());
Text text = textStorageService.getText(dossier.getId(), file.getId());
var indexDocument = indexDocumentConverterService.convert(dossier.getDossierTemplateId(),
dossier.getId(),
file.getId(),
file.getFilename(),
text,
file.getAssignee(),
dossier.getSoftDeletedTime() != null,
dossier.getArchivedTime() != null,
file.getWorkflowStatus(),
file.getFileAttributes());
documentIndexService.indexDocument(indexDocument);
fileStatusProcessingUpdateClient.indexingSuccessful(dossier.getId(), file.getId());
log.info("Successfully indexed dossier {} file {}", dossier.getId(), file.getId());
}
private void addAllDocumentsToIndexQueue() {
var allDossiers = dossierClient.getAllDossiers(true, true);
for (Dossier dossier : allDossiers) {
addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getDossierStatus(dossier.getId()));
addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getSoftDeletedDossierStatus(dossier.getId()));
}
log.info("Successfully added all files from all dossiers to index queue (including archived and deleted)");
}
private void addFilesToIndexingQueue(String dossierId, List<FileModel> files) {
for (FileModel file : files) {
log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
rabbitTemplate.convertAndSend(INDEXING_REQUEST_EXCHANGE,
TenantContext.getTenantId(),
IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build(),
message -> {
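// Note: the tenant queues are declared with x-max-priority 2, and RabbitMQ caps higher values, so 99 is effectively 2.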
message.getMessageProperties().setPriority(99);
return message;
});
}
}
}
//package com.iqser.red.service.search.v1.server.queue;
//
//import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
//import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_QUEUE;
//import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_DQL;
//import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_QUEUE;
//
//import java.util.List;
//
//import org.springframework.amqp.AmqpRejectAndDontRequeueException;
//import org.springframework.amqp.core.Message;
//import org.springframework.amqp.rabbit.annotation.RabbitHandler;
//import org.springframework.amqp.rabbit.annotation.RabbitListener;
//import org.springframework.amqp.rabbit.core.RabbitTemplate;
//import org.springframework.stereotype.Service;
//
//import com.fasterxml.jackson.core.JsonProcessingException;
//import com.fasterxml.jackson.databind.ObjectMapper;
//import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
//import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
//import com.iqser.red.service.search.v1.model.IndexMessage;
//import com.iqser.red.service.search.v1.model.IndexMessageType;
//import com.iqser.red.service.search.v1.server.client.DossierClient;
//import com.iqser.red.service.search.v1.server.client.FileStatusClient;
//import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
//import com.iqser.red.service.search.v1.server.model.Text;
//import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
//import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
//import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
//import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
//import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
//import com.iqser.red.service.search.v1.server.service.IndexInformationService;
//import com.iqser.red.service.search.v1.server.service.TextStorageService;
//
//import lombok.RequiredArgsConstructor;
//import lombok.SneakyThrows;
//import lombok.extern.slf4j.Slf4j;
//
//@Slf4j
//@Service
//@RequiredArgsConstructor
//public class IndexingMessageReceiver {
//
// private final ObjectMapper objectMapper;
// private final TextStorageService textStorageService;
// private final FileStatusClient fileStatusClient;
// private final DossierClient dossierClient;
// private final FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
// private final RabbitTemplate rabbitTemplate;
//
// private final DocumentDeleteService documentDeleteService;
// private final DocumentUpdateService documentUpdateService;
// private final DocumentIndexService documentIndexService;
// private final IndexDeleteService indexDeleteService;
// private final IndexInformationService indexInformationService;
// private final IndexDocumentConverterService indexDocumentConverterService;
//
//
// @SneakyThrows
// @RabbitHandler
// @RabbitListener(queues = INDEXING_QUEUE)
// public void receiveIndexingRequest(Message message) {
//
// var indexRequest = objectMapper.readValue(message.getBody(), IndexMessage.class);
//
// // This prevents from endless retries oom errors.
// if (message.getMessageProperties().isRedelivered()) {
// throw new AmqpRejectAndDontRequeueException(String.format("Error during last processing of request with dossierId: %s and fileId: %s, do not retry.",
// indexRequest.getDossierId(),
// indexRequest.getFileId()));
// }
//
// receiveIndexingRequest(indexRequest);
// }
//
//
// public void receiveIndexingRequest(IndexMessage indexRequest) {
//
// log.info("Processing indexing request: {}", indexRequest);
//
// switch (indexRequest.getMessageType()) {
// case INSERT:
// var fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
// var dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
// indexFile(dossier, fileStatus);
// break;
//
// case UPDATE:
// fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
// dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
//
// var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
// dossier.getSoftDeletedTime() != null,
// dossier.getArchivedTime() != null,
// fileStatus.getWorkflowStatus().name(),
// fileStatus.getFileAttributes());
//
// documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
// log.info("Successfully updated {}", indexRequest);
// break;
//
// case DROP:
// indexDeleteService.recreateIndex();
// addAllDocumentsToIndexQueue();
// try {
// indexInformationService.updateIndexInformation();
// } catch (Exception e) {
// log.error("Could not update index information", e);
// }
// break;
//
// default:
// throw new IllegalArgumentException("MessageType '" + indexRequest.getMessageType() + "' does not exist");
// }
//
// }
//
//
// @RabbitHandler
// @RabbitListener(queues = INDEXING_DQL)
// public void receiveIndexingRequestDQL(String in) throws JsonProcessingException {
//
// var indexRequest = objectMapper.readValue(in, IndexMessage.class);
// fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
// log.info("Failed to process indexing request: {}", indexRequest);
// }
//
//
// @RabbitHandler
// @RabbitListener(queues = DELETE_FROM_INDEX_QUEUE)
// public void receiveDeleteDocumentRequest(String in) throws JsonProcessingException {
//
// var indexRequest = objectMapper.readValue(in, IndexMessage.class);
// log.info("Processing delete document request: {}", indexRequest);
// documentDeleteService.deleteDocument(indexRequest.getFileId());
// log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
//
// }
//
//
// @RabbitHandler
// @RabbitListener(queues = DELETE_FROM_INDEX_DLQ)
// public void receiveDeleteDocumentRequestDLQ(String in) throws JsonProcessingException {
//
// var indexRequest = objectMapper.readValue(in, IndexMessage.class);
// fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
// log.info("Failed to process delete from index request: {}", indexRequest);
// }
//
//
// private void indexFile(Dossier dossier, FileModel file) {
//
// fileStatusProcessingUpdateClient.indexing(dossier.getId(), file.getId());
// Text text = textStorageService.getText(dossier.getId(), file.getId());
//
// var indexDocument = indexDocumentConverterService.convert(dossier.getDossierTemplateId(),
// dossier.getId(),
// file.getId(),
// file.getFilename(),
// text,
// file.getAssignee(),
//// dossier.getSoftDeletedTime() != null,
// dossier.getArchivedTime() != null,
// file.getWorkflowStatus(),
// file.getFileAttributes(), null);
//
// documentIndexService.indexDocument(indexDocument);
// fileStatusProcessingUpdateClient.indexingSuccessful(dossier.getId(), file.getId());
// log.info("Successfully indexed dossier {} file {}", dossier.getId(), file.getId());
// }
//
//
// private void addAllDocumentsToIndexQueue() {
//
// var allDossiers = dossierClient.getAllDossiers(true, true);
// for (Dossier dossier : allDossiers) {
// addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getDossierStatus(dossier.getId()));
// addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getSoftDeletedDossierStatus(dossier.getId()));
// }
// log.info("Successfully added all files from all dossiers to index queue (including archived and deleted)");
// }
//
//
// private void addFilesToIndexingQueue(String dossierId, List<FileModel> files) {
//
// for (FileModel file : files) {
// try {
// log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
// rabbitTemplate.convertAndSend(INDEXING_QUEUE,
// objectMapper.writeValueAsString(IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build()),
// message -> {
// message.getMessageProperties().setPriority(99);
// return message;
// });
// } catch (JsonProcessingException e) {
// throw new RuntimeException(e);
// }
// }
// }
//
//}

View File

@ -0,0 +1,52 @@
package com.iqser.red.service.search.v1.server.queue;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.QueueBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import lombok.RequiredArgsConstructor;
@Configuration
@RequiredArgsConstructor
public class MessagingConfiguration {
public static final String INDEXING_QUEUE = "indexingQueue";
public static final String INDEXING_DQL = "indexingDQL";
public static final String DELETE_FROM_INDEX_QUEUE = "deleteFromIndexQueue";
public static final String DELETE_FROM_INDEX_DLQ = "deleteFromIndexDLQ";
@Bean
public Queue indexingQueue() {
return QueueBuilder.durable(INDEXING_QUEUE).withArgument("x-dead-letter-exchange", "").withArgument("x-dead-letter-routing-key", INDEXING_DQL).maxPriority(2).build();
}
@Bean
public Queue indexingDeadLetterQueue() {
return QueueBuilder.durable(INDEXING_DQL).build();
}
@Bean
public Queue deleteFromIndexQueue() {
return QueueBuilder.durable(DELETE_FROM_INDEX_QUEUE)
.withArgument("x-dead-letter-exchange", "")
.withArgument("x-dead-letter-routing-key", DELETE_FROM_INDEX_DLQ)
.maxPriority(2)
.build();
}
@Bean
public Queue deleteFromIndexDLQ() {
return QueueBuilder.durable(DELETE_FROM_INDEX_DLQ).build();
}
}
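Messages rejected by a consumer on these queues are dead-lettered through the default exchange to the configured DLQ routing key. A minimal producer sketch against this topology, illustrative only and assuming an injected RabbitTemplate:
import org.springframework.amqp.rabbit.core.RabbitTemplate;
public class IndexingProducerExample {
private final RabbitTemplate rabbitTemplate;
public IndexingProducerExample(RabbitTemplate rabbitTemplate) {
this.rabbitTemplate = rabbitTemplate;
}
public void enqueue(String payload) {
// Publishing via the default exchange: the routing key is simply the queue name.
rabbitTemplate.convertAndSend(MessagingConfiguration.INDEXING_QUEUE, payload, message -> {
message.getMessageProperties().setPriority(2); // stay within the declared x-max-priority
return message;
});
}
}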

View File

@ -1,74 +0,0 @@
package com.iqser.red.service.search.v1.server.queue;
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.*;
import java.util.Map;
import java.util.Set;
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Service;
import com.knecon.fforesight.tenantcommons.TenantProvider;
import com.knecon.fforesight.tenantcommons.model.TenantCreatedEvent;
import com.knecon.fforesight.tenantcommons.model.TenantQueueConfiguration;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
import com.knecon.fforesight.tenantcommons.queue.RabbitQueueFromExchangeService;
import com.knecon.fforesight.tenantcommons.queue.TenantExchangeMessageReceiver;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
public class TenantExchangeMessageReceiverImpl extends TenantExchangeMessageReceiver {
public TenantExchangeMessageReceiverImpl(RabbitQueueFromExchangeService rabbitQueueService, TenantProvider tenantProvider) {
super(rabbitQueueService, tenantProvider);
}
@Override
protected Set<TenantQueueConfiguration> getTenantQueueConfigs() {
return Set.of(TenantQueueConfiguration.builder()
.listenerId(IndexingMessageReceiver.INDEXING_LISTENER_ID)
.exchangeName(INDEXING_REQUEST_EXCHANGE)
.queuePrefix(INDEXING_REQUEST_QUEUE_PREFIX)
.dlqName(INDEXING_DLQ)
.arguments(Map.of("x-max-priority", 2))
.build(),
TenantQueueConfiguration.builder()
.listenerId(IndexingMessageReceiver.DELETE_FROM_INDEX_LISTENER_ID)
.exchangeName(DELETE_FROM_INDEX_REQUEST_EXCHANGE)
.queuePrefix(DELETE_FROM_INDEX_REQUEST_QUEUE_PREFIX)
.dlqName(DELETE_FROM_INDEX_DLQ)
.arguments(Map.of("x-max-priority", 2))
.build());
}
@EventListener(ApplicationReadyEvent.class)
public void onApplicationReady() {
log.info("Application ready, initializing tenant queues");
super.initializeQueues();
}
@RabbitHandler
@RabbitListener(queues = "#{tenantMessagingConfigurationImpl.getTenantCreatedQueueName()}")
public void reactToTenantCreation(TenantCreatedEvent tenantCreatedEvent) {
super.reactToTenantCreation(tenantCreatedEvent);
}
@RabbitHandler
@RabbitListener(queues = "#{tenantMessagingConfigurationImpl.getTenantDeletedQueueName()}")
public void reactToTenantDeletion(TenantResponse tenantResponse) {
super.reactToTenantDeletion(tenantResponse);
}
}

View File

@ -1,48 +0,0 @@
package com.iqser.red.service.search.v1.server.queue;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
public class UpdatedTenantMessageReceiver {
private final IndexQueryService indexQueryService;
private final IndexDeleteService indexDeleteService;
@Value("${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
private String tenantUpdatedQueue;
@PostConstruct
public void postConstruct() {
log.info("Listener for tenant updated events started for queue: {}", this.tenantUpdatedQueue);
}
@RabbitListener(queues = "${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
public void updateTenant(TenantResponse tenant) {
String numberOfReplicas = tenant.getSearchConnection().getNumberOfReplicas();
String numberOfShards = tenant.getSearchConnection().getNumberOfShards();
IndexQueryResult queryResult = indexQueryService.getIndexQueryResult(tenant.getSearchConnection());
if (queryResult.isIndexFound() && (!numberOfReplicas.equals(queryResult.getNumberOfReplicas()) || !numberOfShards.equals(queryResult.getNumberOfShards()))) {
log.info("Number of shards or replicas were changed during tenant update, indices will be recreated");
indexDeleteService.recreateIndex(tenant.getSearchConnection());
}
}
}

View File

@ -5,6 +5,5 @@ import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
public interface DocumentUpdateService {
void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate);
boolean documentExists(String fileId);
}

View File

@ -1,19 +1,13 @@
package com.iqser.red.service.search.v1.server.service;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
public interface IndexDeleteService {
void recreateIndex();
void recreateIndex(SearchConnection searchConnection);
void closeIndex();
void dropIndex();
void dropIndex(SearchConnection searchConnection);
}

View File

@ -1,54 +1,36 @@
package com.iqser.red.service.search.v1.server.service;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.Set;
import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
import com.iqser.red.service.search.v1.server.model.IndexSection;
import com.iqser.red.service.search.v1.server.model.SectionArea;
import com.iqser.red.service.search.v1.server.model.SectionText;
import com.iqser.red.service.search.v1.server.model.Text;
@Service
public class IndexDocumentConverterService {
public IndexDocument convert(String dossierTemplateId,
String dossierId,
String fileId,
String filename,
Text text,
String assignee,
boolean deleted,
boolean archived,
WorkflowStatus workflowStatus,
Map<String, String> fileAttributes) {
public IndexDocument convert(String fileId, String filename, int sectionNr, Set<Integer> pages, String headline, String text, Map<String, String> fileAttributes, Float[] embeddingsVector) {
return IndexDocument.builder()
.dossierTemplateId(dossierTemplateId)
.dossierId(dossierId)
.fileId(fileId)
.filename(filename)
.sections(text.getSectionTexts().stream().map(this::convert).collect(Collectors.toList()))
.date(Date.from(OffsetDateTime.now().toInstant()))
.sectionNumber(sectionNr)
.pages(pages)
.headline(headline)
.text(text)
.fileAttributes(convertFileAttributes(fileAttributes))
.assignee(assignee)
.dossierDeleted(deleted)
.dossierArchived(archived)
.workflowStatus(workflowStatus.name())
.wordEmbeddingsVector(embeddingsVector)
.build();
}
public IndexDocumentUpdate convertUpdateDocument(String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {
return IndexDocumentUpdate.builder()
.assignee(assignee)
.dossierDeleted(deleted)
@ -68,17 +50,4 @@ public class IndexDocumentConverterService {
return converted;
}
private IndexSection convert(SectionText sectionText) {
return IndexSection.builder()
.sectionNumber(sectionText.getSectionNumber())
.text(sectionText.getText())
.pages(sectionText.getSectionAreas().stream().map(SectionArea::getPage).collect(Collectors.toSet()))
.headline(sectionText.getHeadline())
.build();
}
}
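The converter's new signature reflects the switch to one index document per section, each optionally carrying a dense embeddings vector, instead of one document per file. A sketch of a call with the new signature; all argument values are illustrative:
// Illustrative only: values are made up.
IndexDocument doc = indexDocumentConverterService.convert(
"file-1", // fileId
"report.pdf", // filename
3, // sectionNr
Set.of(1, 2), // pages the section spans
"Introduction", // headline
"Section text ...", // text
Map.of("language", "en"), // fileAttributes
new Float[] {0.12f, -0.07f}); // embeddingsVector from the embedding model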

View File

@ -68,13 +68,13 @@ public class IndexInformationService {
byte[] buffer = new byte[8192];
int count;
MessageDigest digest = MessageDigest.getInstance("SHA-256");
try (BufferedInputStream bis = new BufferedInputStream(new ClassPathResource(PATH_TO_CONFIG).getInputStream())) {
while ((count = bis.read(buffer)) > 0) {
digest.update(buffer, 0, count);
}
return Arrays.toString(digest.digest());
BufferedInputStream bis = new BufferedInputStream(new ClassPathResource(PATH_TO_CONFIG).getInputStream());
while ((count = bis.read(buffer)) > 0) {
digest.update(buffer, 0, count);
}
bis.close();
return Arrays.toString(digest.digest());
}
}

View File

@ -1,17 +0,0 @@
package com.iqser.red.service.search.v1.server.service;
import lombok.AccessLevel;
import lombok.Builder;
import lombok.Data;
import lombok.experimental.FieldDefaults;
@Data
@Builder
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class IndexQueryResult {
boolean indexFound;
String numberOfShards;
String numberOfReplicas;
}

View File

@ -1,9 +0,0 @@
package com.iqser.red.service.search.v1.server.service;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
public interface IndexQueryService {
IndexQueryResult getIndexQueryResult(SearchConnection searchConnection);
}

View File

@ -4,9 +4,9 @@ import org.springframework.stereotype.Service;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
import com.iqser.red.storage.commons.service.StorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;

View File

@ -6,9 +6,9 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch._types.Refresh;
@ -26,15 +26,11 @@ public class DocumentDeleteServiceImpl implements DocumentDeleteService {
public void deleteDocument(String fileId) {
DeleteRequest request = new DeleteRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.build();
DeleteRequest request = new DeleteRequest.Builder().index(TenantContext.getTenantId()).id(fileId).refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())).build();
try {
clientCache.getClient().delete(request);
} catch (IOException | ElasticsearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentDeleteError(fileId, e);
}
}

View File

@ -7,9 +7,9 @@ import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch._types.Refresh;
@ -32,14 +32,13 @@ public class DocumentIndexServiceImpl implements DocumentIndexService {
try {
clientCache.getClient()
.index(i -> i.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(indexDocument.getFileId())
.index(i -> i.index(TenantContext.getTenantId())
.id(indexDocument.getFileId()+"_"+indexDocument.getSectionNumber())
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.document(indexDocument));
} catch (IOException | ElasticsearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentIndexError(indexDocument.getFileId(), e);
}
}
}
}

View File

@ -7,9 +7,9 @@ import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch._types.Refresh;
@ -32,22 +32,11 @@ public class DocumentUpdateServiceImpl implements DocumentUpdateService {
try {
clientCache.getClient()
.update(u -> u.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.doc(indexDocumentUpdate)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())), IndexDocumentUpdate.class);
.update(u -> u.index(TenantContext.getTenantId()).id(fileId).doc(indexDocumentUpdate).refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())),
IndexDocumentUpdate.class);
} catch (IOException | ElasticsearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentUpdateError(fileId, e);
}
}
@SneakyThrows
@Timed("redactmanager_documentExists")
public boolean documentExists(String fileId) {
return clientCache.getClient().exists(e -> e.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix())).id(fileId)).value();
}
}
}

View File

@ -2,7 +2,6 @@ package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.util.stream.Collectors;
import lombok.SneakyThrows;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
@ -11,7 +10,7 @@ import org.apache.http.impl.client.BasicCredentialsProvider;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;
import co.elastic.clients.elasticsearch.ElasticsearchClient;
import co.elastic.clients.json.jackson.JacksonJsonpMapper;
@ -21,11 +20,10 @@ import lombok.Data;
import lombok.experimental.Delegate;
@Data
@SuppressWarnings("PMD")
public class EsClient {
// Lower timeouts should be set per request.
private static final int ABSURD_HIGH_TIMEOUT = 600_000;
private static final int ABSURD_HIGH_TIMEOUT = 90_000_000;
private SearchConnection searchConnection;
@ -38,12 +36,11 @@ public class EsClient {
HttpHost[] httpHost = searchConnection.getHosts()
.stream()
.map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
.toList()
.collect(Collectors.toList())
.toArray(new HttpHost[searchConnection.getHosts().size()]);
var builder = RestClient.builder(httpHost)
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT)
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));
RestClientBuilder builder = RestClient.builder(httpHost)
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT).setSocketTimeout(ABSURD_HIGH_TIMEOUT));
if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
@ -57,10 +54,4 @@ public class EsClient {
this.elasticsearchClient = new ElasticsearchClient(transport);
}
@SneakyThrows
public void terminate() {
elasticsearchClient._transport().close();
}
}

View File

@ -1,6 +1,9 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
@ -10,11 +13,11 @@ import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.knecon.fforesight.tenantcommons.EncryptionDecryptionService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import com.iqser.red.service.search.v1.server.client.TenantsClient;
import com.iqser.red.service.search.v1.server.model.Connection;
import com.iqser.red.service.search.v1.server.multitenancy.EncryptionDecryptionService;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@ -35,57 +38,37 @@ public class EsClientCache {
@Value("${multitenancy.client-cache.expireAfterAccess:10}")
private Integer expireAfterAccess;
private LoadingCache<String, EsClient> clients;
@SneakyThrows
public void isClientAliveOrTerminate() {
try {
var client = clients.get(TenantContext.getTenantId());
try {
log.info("Checking if client is still alive: {}", client.info());
} catch (Exception e) {
try {
client.terminate();
} catch (Exception e2) {
log.info("Failed to terminate ES Client");
clients.invalidate(TenantContext.getTenantId());
}
}
}catch (Exception e){
log.error("Failed to terminate/invalide client", e);
}
}
private LoadingCache<String, Connection> connections;
private LoadingCache<Connection, EsClient> clients;
@PostConstruct
protected void createCache() {
connections = CacheBuilder.newBuilder().maximumSize(maximumSize).expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES).build(new CacheLoader<>() {
public Connection load(String tenantId) {
var tenant = tenantsClient.getTenant(tenantId);
var hostsAsString = tenant.getSearchConnection().getHosts().stream().collect(Collectors.joining());
return Connection.builder().hosts(hostsAsString).searchConnection(tenant.getSearchConnection()).build();
}
});
clients = CacheBuilder.newBuilder()
.maximumSize(maximumSize)
.expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
.removalListener((RemovalListener<String, EsClient>) removal -> {
try {
removal.getValue().terminate();
log.info("Closed elasticsearch client for tenant {}", removal.getKey());
} catch (Exception e) {
log.info("Failed to close elasticsearch client for tenant {}", removal.getKey());
}
.removalListener((RemovalListener<Connection, EsClient>) removal -> {
removal.getValue().shutdown();
log.info("Closed elasticsearch client for tenant {}", removal.getKey().getHosts());
})
.build(new CacheLoader<>() {
public EsClient load(String tenantId) {
public EsClient load(Connection key) {
var tenant = tenantsClient.getTenant(tenantId);
if (tenant.getSearchConnection().getPassword() != null) {
tenant.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(tenant.getSearchConnection().getPassword()));
if (key.getSearchConnection().getPassword() != null) {
key.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(key.getSearchConnection().getPassword()));
}
var client = new EsClient(tenant.getSearchConnection());
log.info("Initialized elasticsearch client for tenant {}", tenantId);
var client = new EsClient(key.getSearchConnection());
log.info("Initialized elasticsearch client for tenant {}", key);
indexCreatorService.createIndex(client);
return client;
}
@ -96,7 +79,8 @@ public class EsClientCache {
@SneakyThrows
public EsClient getClient() {
return clients.get(TenantContext.getTenantId());
var connection = connections.get(TenantContext.getTenantId());
return clients.get(connection);
}
}
}
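Callers resolve a client through the two-level cache: tenant id to Connection, then Connection to EsClient, so the expensive client is keyed by the connection details rather than the tenant id. A usage sketch, illustrative only, mirroring the set/clear pattern used elsewhere in this diff:
// Illustrative only.
TenantContext.setTenantId("tenant-a");
try {
EsClient client = esClientCache.getClient(); // tenantId -> Connection -> EsClient
// ... issue requests through the delegated ElasticsearchClient API ...
} finally {
TenantContext.setTenantId(null);
}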

View File

@ -9,8 +9,8 @@ import org.springframework.core.io.ResourceLoader;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import co.elastic.clients.elasticsearch._types.mapping.TypeMapping;
import co.elastic.clients.elasticsearch.indices.IndexSettings;
@ -23,7 +23,6 @@ import lombok.extern.slf4j.Slf4j;
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
@SuppressWarnings("PMD")
public class IndexCreatorServiceImpl {
private final ElasticsearchSettings settings;
@ -33,10 +32,7 @@ public class IndexCreatorServiceImpl {
if (!indexExists(esClient)) {
try {
var response = esClient.indices()
.create(i -> i.index(IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix()))
.settings(createIndexSettings(esClient))
.mappings(createIndexMapping()));
var response = esClient.indices().create(i -> i.index(TenantContext.getTenantId()).settings(createIndexSettings(esClient)).mappings(createIndexMapping()));
log.info("Successfully created index: {}", response.index());
} catch (IOException e) {
log.error("Failed to create index.", e);
@ -48,7 +44,7 @@ public class IndexCreatorServiceImpl {
private boolean indexExists(EsClient esClient) {
try {
var response = esClient.indices().exists(i -> i.index(IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix())));
var response = esClient.indices().exists(i -> i.index(TenantContext.getTenantId()));
return response.value();
} catch (IOException e) {
throw IndexException.indexExists(e);
@ -81,4 +77,4 @@ public class IndexCreatorServiceImpl {
}
}
}
}

View File

@ -4,9 +4,8 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
@ -30,45 +29,10 @@ public class IndexDeleteServiceImpl implements IndexDeleteService {
}
@Override
public void recreateIndex(SearchConnection searchConnection) {
var client = new EsClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
indexCreatorService.createIndex(client);
}
@SneakyThrows
public void closeIndex() {
closeIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}
@SneakyThrows
public void dropIndex() {
dropIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}
public void dropIndex(SearchConnection searchConnection) {
var client = new EsClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
}
@SneakyThrows
private void closeIndex(EsClient client, String indexPrefix) {
var closeIndexResponse = client.indices()
.close(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
var closeIndexResponse = clientCache.getClient().indices().close(i -> i.index(TenantContext.getTenantId()).timeout(t -> t.time("2m")));
if (closeIndexResponse.acknowledged()) {
log.info("Index is closed");
} else {
@ -78,11 +42,10 @@ public class IndexDeleteServiceImpl implements IndexDeleteService {
@SneakyThrows
private void dropIndex(EsClient client, String indexPrefix) {
public void dropIndex() {
log.info("Will drop index");
var deleteIndexResponse = client.indices()
.delete(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
var deleteIndexResponse = clientCache.getClient().indices().delete(i -> i.index(TenantContext.getTenantId()).timeout(t -> t.time("2m")));
if (deleteIndexResponse.acknowledged()) {
log.info("Index is dropped");
@ -91,4 +54,4 @@ public class IndexDeleteServiceImpl implements IndexDeleteService {
}
}
}
}

View File

@ -1,70 +0,0 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.util.Optional;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
import co.elastic.clients.elasticsearch.indices.GetIndicesSettingsResponse;
import co.elastic.clients.elasticsearch.indices.IndexState;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
@SuppressWarnings("PMD")
public class IndexQueryServiceImpl implements IndexQueryService {
@SneakyThrows
public IndexQueryResult getIndexQueryResult(SearchConnection searchConnection) {
IndexQueryResult.IndexQueryResultBuilder builder = IndexQueryResult.builder();
getIndexState(searchConnection).ifPresent(indexState -> {
builder.indexFound(true);
var indexSettings = indexState.settings();
if (indexSettings != null) {
String replicas = indexSettings.numberOfReplicas();
String shards = indexSettings.numberOfShards();
if (indexSettings.index() != null) {
if (replicas == null) {
replicas = indexSettings.index().numberOfReplicas();
}
if (shards == null) {
shards = indexSettings.index().numberOfShards();
}
}
builder.numberOfReplicas(replicas).numberOfShards(shards);
}
});
return builder.build();
}
@SneakyThrows
private Optional<IndexState> getIndexState(SearchConnection searchConnection) {
var esClient = new EsClient(searchConnection);
var indexName = IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix());
try {
GetIndicesSettingsResponse settings = esClient.indices().getSettings(i -> i.index(indexName));
return Optional.ofNullable(settings.get(indexName));
} catch (ElasticsearchException elasticsearchException) {
return Optional.empty();
}
}
}

View File

@ -1,337 +1,335 @@
package com.iqser.red.service.search.v1.server.service.elasticsearch;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.model.MatchedDocument;
import com.iqser.red.service.search.v1.model.MatchedSection;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.Query;
import com.iqser.red.service.search.v1.server.service.SearchService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
import co.elastic.clients.elasticsearch._types.FieldValue;
import co.elastic.clients.elasticsearch._types.query_dsl.ChildScoreMode;
import co.elastic.clients.elasticsearch._types.query_dsl.QueryBuilders;
import co.elastic.clients.elasticsearch.core.SearchRequest;
import co.elastic.clients.elasticsearch.core.SearchResponse;
import co.elastic.clients.elasticsearch.core.search.HighlightField;
import co.elastic.clients.elasticsearch.core.search.HighlighterType;
import co.elastic.clients.elasticsearch.core.search.Hit;
import co.elastic.clients.elasticsearch.core.search.InnerHitsResult;
import co.elastic.clients.json.JsonData;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.instrument.util.StringUtils;
import jakarta.json.JsonObject;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class SearchServiceImpl implements SearchService {
private final EsClientCache clientCache;
@Timed("redactmanager_search")
public SearchResult search(String queryString,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
int page,
int pageSize,
boolean returnSections) {
Query query = QueryStringConverter.convert(queryString);
Map<String, HighlightField> highlightFieldMap = new HashMap<>();
highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
highlightFieldMap.put("filename", new HighlightField.Builder().build());
highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());
SearchRequest request = new SearchRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.query(convertQuery(query,
dossierTemplateIds,
dossierIds,
fileId,
assignee,
includeDeletedDossiers,
includeArchivedDossiers,
workflowStatus,
fileAttributes,
returnSections))
.from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
.size(getPageSizeOrDefault(pageSize))
.source(s -> s.filter(f -> f.includes("dossierId",
"dossierTemplateId",
"dossierDeleted",
"dossierArchived",
"filename",
"fileId",
"assignee",
"dossierStatus",
"workflowStatus",
"fileAttributes")))
.highlight(h -> h.type(HighlighterType.FastVector).fields(highlightFieldMap))
.trackScores(true)
.build();
SearchResponse response = execute(request);
return convert(response, query);
}
protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {
try {
return clientCache.getClient().search(searchRequest, IndexDocument.class);
} catch (IOException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.searchFailed(e);
}
}
private co.elastic.clients.elasticsearch._types.query_dsl.Query convertQuery(Query query,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
boolean returnSections) {
var entireQuery = QueryBuilders.bool();
var sectionsQueries = QueryBuilders.bool();
for (String must : query.getMusts()) {
var textPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must));
var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must));
var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
.query(must.toLowerCase(Locale.ROOT))
.queryName("fileAttributes." + must));
var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
entireQuery.must(filenameOrTextMustQuery);
sectionsQueries.should(textPhraseQuery);
}
for (String should : query.getShoulds()) {
var textTermQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should));
var filenameTermQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should));
var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
.query(should.toLowerCase(Locale.ROOT))
.queryName("fileAttributes." + should));
entireQuery.should(textTermQuery);
entireQuery.should(filenameTermQuery);
entireQuery.should(fileAttributesPhraseQuery);
sectionsQueries.should(textTermQuery);
}
if (returnSections) {
var nestedQuery = QueryBuilders.nested(n -> n.scoreMode(ChildScoreMode.Avg)
.queryName("sections")
.query(sectionsQueries.build()._toQuery())
.path("sections")
.innerHits(i -> i.size(100)));
entireQuery.should(nestedQuery);
}
var filterQuery = QueryBuilders.bool();
if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {
var dossierTemplateIdQueryBuilder = QueryBuilders.bool();
for (var dossierTemplateId : dossierTemplateIds) {
if (StringUtils.isNotEmpty(dossierTemplateId)) {
dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierTemplateId").query(dossierTemplateId)));
}
}
filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
}
if (dossierIds != null && !dossierIds.isEmpty()) {
var dossierIdQueryBuilder = QueryBuilders.bool();
for (var dossierId : dossierIds) {
if (StringUtils.isNotEmpty(dossierId)) {
dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierId").query(dossierId)));
}
}
filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
}
if (StringUtils.isNotEmpty(fileId)) {
filterQuery.must(QueryBuilders.match(q -> q.field("fileId").query(fileId)));
}
if (StringUtils.isNotEmpty(assignee)) {
filterQuery.must(QueryBuilders.match(q -> q.field("assignee").query(assignee)));
}
if (includeArchivedDossiers) {
filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived")
.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
} else {
filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
}
if (includeDeletedDossiers) {
filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted")
.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
} else {
filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
}
if (StringUtils.isNotEmpty(workflowStatus)) {
filterQuery.must(QueryBuilders.match(q -> q.field("workflowStatus").query(workflowStatus)));
}
if (fileAttributes != null && !fileAttributes.isEmpty()) {
var fileAttributesQueryBuilder = QueryBuilders.bool();
for (var fileAttributeKey : fileAttributes.keySet()) {
if (StringUtils.isNotEmpty(fileAttributeKey)) {
fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
.must(QueryBuilders.match(q -> q.field("fileAttributes.name").query(fileAttributeKey)))
.must(QueryBuilders.match(q -> q.field("fileAttributes.value").query(fileAttributes.get(fileAttributeKey))))
.build()
._toQuery()));
}
}
filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
}
return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
}
private SearchResult convert(SearchResponse response, Query query) {
List<Hit> hits = response.hits().hits();
return SearchResult.builder()
.matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
.maxScore(response.hits().maxScore().floatValue())
.total(response.hits().total().value())
.build();
}
private MatchedDocument convertSearchHit(Hit hit, Query query) {
List<String> m = hit.matchedQueries();
Set<String> matchesTerms = m.stream()
.map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
.map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
.collect(Collectors.toSet());
Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());
IndexDocument indexDocument = (IndexDocument) hit.source();
MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
.score(hit.score().floatValue())
.dossierId(indexDocument.getDossierId())
.dossierTemplateId(indexDocument.getDossierTemplateId())
.fileId(indexDocument.getFileId())
.assignee(indexDocument.getAssignee())
.fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
.workflowStatus(indexDocument.getWorkflowStatus())
.fileName(indexDocument.getFilename())
.dossierDeleted(indexDocument.isDossierDeleted())
.dossierArchived(indexDocument.isDossierArchived())
.highlights(hit.highlight())
.matchedTerms(matchesTerms)
.unmatchedTerms(unmatchedTerms);
if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
.containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
}
return matchedDocumentBuilder.build();
}
private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {
Map<String, String> fileAttributes = new HashMap<>();
if (fileAttributesSourceMap != null) {
List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
}
return fileAttributes;
}
private MatchedSection convertInnerHit(Hit<JsonData> hit) {
JsonObject indexSection = hit.source().toJson().asJsonObject();
var jsonArray = indexSection.getJsonArray("pages");
var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());
return MatchedSection.builder()
.headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
.sectionNumber(indexSection.getString("sectionNumber"))
.pages(pages)
.matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
.build();
}
private int getPageSizeOrDefault(int pageSize) {
if (pageSize <= 0) {
return 10;
}
return pageSize;
}
private int getPageOrDefault(int page) {
if (page < 0) {
return 0;
}
return page;
}
}
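For clarity on the paging defaults above: page is zero-based (negative values fall back to 0), pageSize falls back to 10, and the two combine into the request's from/size offsets. A couple of worked values, assuming the two methods above:

// from = getPageOrDefault(page) * getPageSizeOrDefault(pageSize), size = getPageSizeOrDefault(pageSize)
// page = -1, pageSize = 0  ->  from = 0 * 10 = 0,  size = 10  (both defaults applied)
// page = 2,  pageSize = 25 ->  from = 2 * 25 = 50, size = 25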

View File

@ -1,44 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.opensearch.client.opensearch.core.DeleteRequest;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import lombok.RequiredArgsConstructor;
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentDeleteServiceImpl implements DocumentDeleteService {
private final OpensearchClientCache clientCache;
private final ElasticsearchSettings settings;
public void deleteDocument(String fileId) {
DeleteRequest request = new DeleteRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.build();
try {
clientCache.getClient().delete(request);
} catch (IOException | OpenSearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentDeleteError(fileId, e);
}
}
}

View File

@ -1,44 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentIndexServiceImpl implements DocumentIndexService {
private final OpensearchClientCache clientCache;
private final ElasticsearchSettings settings;
@Timed("redactmanager_indexDocument")
public void indexDocument(IndexDocument indexDocument) {
try {
clientCache.getClient().index(i -> i.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(indexDocument.getFileId())
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.document(indexDocument));
} catch (IOException | OpenSearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentIndexError(indexDocument.getFileId(), e);
}
}
}

View File

@ -1,53 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch._types.Refresh;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class DocumentUpdateServiceImpl implements DocumentUpdateService {
private final OpensearchClientCache clientCache;
private final ElasticsearchSettings settings;
@SneakyThrows
@Timed("redactmanager_updateDocument")
public void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate) {
try {
clientCache.getClient()
.update(u -> u.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.doc(indexDocumentUpdate)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())), IndexDocumentUpdate.class);
} catch (IOException | OpenSearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentUpdateError(fileId, e);
}
}
@SneakyThrows
@Timed("redactmanager_documentExists")
public boolean documentExists(String fileId) {
return clientCache.getClient().exists(e -> e.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix())).id(fileId)).value();
}
}

View File

@ -1,101 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import org.opensearch.client.json.JsonpMapper;
import org.opensearch.client.opensearch._types.mapping.TypeMapping;
import org.opensearch.client.opensearch.indices.IndexSettings;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.core.io.ResourceLoader;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import jakarta.json.stream.JsonParser;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
@SuppressWarnings("PMD")
public class IndexCreatorServiceImpl {
private final ElasticsearchSettings settings;
public void createIndex(OpensearchClient client) {
if (!indexExists(client)) {
try {
var response = client.indices()
.create(i -> i.index(IndexNameHelper.getSearchIndex(client.getSearchConnection().getIndexPrefix()))
.settings(createIndexSettings(client))
.mappings(createIndexMapping(client)));
log.info("Successfully created index: {}", response.index());
} catch (IOException e) {
log.error("Failed to create index.", e);
}
}
}
private boolean indexExists(OpensearchClient client) {
try {
var response = client.indices().exists(i -> i.index(IndexNameHelper.getSearchIndex(client.getSearchConnection().getIndexPrefix())));
return response.value();
} catch (IOException e) {
throw IndexException.indexExists(e);
}
}
@SneakyThrows
private TypeMapping createIndexMapping(OpensearchClient client) {
URL resource = ResourceLoader.class.getClassLoader().getResource("index/mapping.json");
try (InputStream is = resource.openStream()) {
JsonpMapper mapper = client._transport().jsonpMapper();
JsonParser parser = mapper.jsonProvider().createParser(is);
return TypeMapping._DESERIALIZER.deserialize(parser, mapper);
}
}
@SneakyThrows
private IndexSettings createIndexSettings(OpensearchClient client) {
URL resource = ResourceLoader.class.getClassLoader().getResource("index/settings.json");
try (InputStream is = resource.openStream()) {
JsonpMapper mapper = client._transport().jsonpMapper();
JsonParser parser = mapper.jsonProvider().createParser(is);
var indexSettingsFromJson = IndexSettings._DESERIALIZER.deserialize(parser, mapper);
// It is not possible to set "index.mapping.nested_objects.limit"; OpenSearch does not seem to have this parameter.
// Hopefully there is no limit at all; I was not able to find anything in the documentation.
// Elasticsearch has such a limit and we cannot set it through this client, which for now seems to be the only reason to keep both clients.
var indexSettings = new IndexSettings.Builder().index(indexSettingsFromJson.index())
.numberOfReplicas(client.getSearchConnection().getNumberOfReplicas())
.numberOfShards(client.getSearchConnection().getNumberOfShards())
.analysis(indexSettingsFromJson.analysis())
.build();
return indexSettings;
}
}
}

View File

@ -1,92 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class IndexDeleteServiceImpl implements IndexDeleteService {
private final OpensearchClientCache clientCache;
private final IndexCreatorServiceImpl indexCreatorService;
public void recreateIndex() {
closeIndex();
dropIndex();
indexCreatorService.createIndex(clientCache.getClient());
}
@Override
public void recreateIndex(SearchConnection searchConnection) {
var client = new OpensearchClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
indexCreatorService.createIndex(client);
}
@SneakyThrows
public void closeIndex() {
closeIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}
@SneakyThrows
public void dropIndex() {
dropIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}
public void dropIndex(SearchConnection searchConnection) {
var client = new OpensearchClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
}
@SneakyThrows
private void closeIndex(OpensearchClient opensearchClient, String indexPrefix) {
var closeIndexResponse = opensearchClient.indices()
.close(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
if (closeIndexResponse.acknowledged()) {
log.info("Index is closed");
} else {
throw new IndexException("Error while closing index");
}
}
@SneakyThrows
private void dropIndex(OpensearchClient opensearchClient, String indexPrefix) {
log.info("Will drop index");
var deleteIndexResponse = opensearchClient.indices().delete(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
if (deleteIndexResponse.acknowledged()) {
log.info("Index is dropped");
} else {
throw new IndexException("Error while dropping index");
}
}
}

View File

@ -1,56 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.util.Optional;
import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch.indices.GetIndicesSettingsResponse;
import org.opensearch.client.opensearch.indices.IndexState;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
@SuppressWarnings("PMD")
public class IndexQueryServiceImpl implements IndexQueryService {
@SneakyThrows
public IndexQueryResult getIndexQueryResult(SearchConnection searchConnection) {
IndexQueryResult.IndexQueryResultBuilder builder = IndexQueryResult.builder();
Optional<IndexState> optionalIndexState = getIndexState(searchConnection);
if (optionalIndexState.isPresent()) {
builder.indexFound(true);
var indexSettings = optionalIndexState.get().settings();
if (indexSettings != null) {
builder.numberOfReplicas(indexSettings.numberOfReplicas()).numberOfShards(indexSettings.numberOfShards());
}
}
return builder.build();
}
@SneakyThrows
private Optional<IndexState> getIndexState(SearchConnection searchConnection) {
var opensearchClient = new OpensearchClient(searchConnection);
var indexName = IndexNameHelper.getSearchIndex(opensearchClient.getSearchConnection().getIndexPrefix());
try {
GetIndicesSettingsResponse settings = opensearchClient.indices().getSettings(i -> i.index(indexName));
return Optional.ofNullable(settings.get(indexName));
} catch (OpenSearchException openSearchException) {
return Optional.empty();
}
}
}

View File

@ -1,62 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.Data;
import lombok.SneakyThrows;
import lombok.experimental.Delegate;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.opensearch.client.RestClient;
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.transport.rest_client.RestClientTransport;
@Data
@SuppressWarnings("PMD")
public class OpensearchClient {
// Lower timeouts should be set per request.
private static final int ABSURD_HIGH_TIMEOUT = 600_000;
private SearchConnection searchConnection;
@Delegate
private OpenSearchClient client;
public OpensearchClient(SearchConnection searchConnection) {
HttpHost[] httpHost = searchConnection.getHosts()
.stream()
.map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
.toList()
.toArray(new HttpHost[searchConnection.getHosts().size()]);
var builder = RestClient.builder(httpHost)
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(0)
.setConnectionRequestTimeout(ABSURD_HIGH_TIMEOUT)
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));
if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(searchConnection.getUsername(), searchConnection.getPassword()));
builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
}
var transport = new RestClientTransport(builder.build(), new JacksonJsonpMapper());
this.searchConnection = searchConnection;
this.client = new OpenSearchClient(transport);
}
@SneakyThrows
public void terminate() {
client._transport().close();
}
}
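For orientation, a usage sketch of this wrapper; the SearchConnection builder calls mirror the integration-test setup later in this diff, and all values are placeholders:

// Usage sketch with placeholder values (requires java.util.Set). info() throws
// IOException, so real callers handle or declare it.
SearchConnection connection = SearchConnection.builder()
.hosts(Set.of("localhost"))
.port(9200)
.scheme("http")
.indexPrefix("tenant1")
.build();
OpensearchClient client = new OpensearchClient(connection);
var clusterInfo = client.info(); // forwarded to the wrapped OpenSearchClient via @Delegate
client.terminate(); // closes the underlying REST transport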

View File

@ -1,101 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.util.concurrent.TimeUnit;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.knecon.fforesight.tenantcommons.EncryptionDecryptionService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class OpensearchClientCache {
private final TenantsClient tenantsClient;
private final EncryptionDecryptionService encryptionDecryptionService;
private final IndexCreatorServiceImpl indexCreatorService;
@Value("${multitenancy.client-cache.maximumSize:100}")
private Long maximumSize;
@Value("${multitenancy.client-cache.expireAfterAccess:10}")
private Integer expireAfterAccess;
private LoadingCache<String, OpensearchClient> clients;
@SneakyThrows
public void isClientAliveOrTerminate() {
try {
var client = clients.get(TenantContext.getTenantId());
try {
log.info("Checking if client is still alive: {}", client.info());
} catch (Exception e) {
try {
client.terminate();
} catch (Exception e2) {
log.info("Failed to terminate ES Client");
clients.invalidate(TenantContext.getTenantId());
}
}
} catch (Exception e) {
log.error("Failed to terminate/invalidate client", e);
}
}
@PostConstruct
protected void createCache() {
clients = CacheBuilder.newBuilder()
.maximumSize(maximumSize)
.expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
.removalListener((RemovalListener<String, OpensearchClient>) removal -> {
try {
removal.getValue().terminate();
log.info("Closed elasticsearch client for tenant {}", removal.getKey());
} catch (Exception e) {
log.info("Failed to close elasticsearch client for tenant {}", removal.getKey());
}
})
.build(new CacheLoader<>() {
public OpensearchClient load(String tenantId) {
var tenant = tenantsClient.getTenant(tenantId);
if (tenant.getSearchConnection().getPassword() != null) {
tenant.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(tenant.getSearchConnection().getPassword()));
}
var client = new OpensearchClient(tenant.getSearchConnection());
log.info("Initialized elasticsearch client for tenant {}", tenantId);
indexCreatorService.createIndex(client);
return client;
}
});
}
@SneakyThrows
public OpensearchClient getClient() {
return clients.get(TenantContext.getTenantId());
}
}
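A short sketch of how the cache is consumed, matching the tenant setup used in the integration tests below:

// Per-tenant client lookup; the tenant id comes from TenantContext, as in the tests below.
TenantContext.setTenantId("redaction");
OpensearchClient client = clientCache.getClient(); // loaded on first access, cached afterwards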

View File

@ -1,357 +0,0 @@
package com.iqser.red.service.search.v1.server.service.opensearch;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.stream.Stream;
import org.opensearch.client.json.JsonData;
import org.opensearch.client.opensearch._types.FieldValue;
import org.opensearch.client.opensearch._types.query_dsl.ChildScoreMode;
import org.opensearch.client.opensearch._types.query_dsl.QueryBuilders;
import org.opensearch.client.opensearch.core.SearchRequest;
import org.opensearch.client.opensearch.core.SearchResponse;
import org.opensearch.client.opensearch.core.search.BuiltinHighlighterType;
import org.opensearch.client.opensearch.core.search.HighlightField;
import org.opensearch.client.opensearch.core.search.HighlighterType;
import org.opensearch.client.opensearch.core.search.Hit;
import org.opensearch.client.opensearch.core.search.InnerHitsResult;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.search.v1.model.MatchedDocument;
import com.iqser.red.service.search.v1.model.MatchedSection;
import com.iqser.red.service.search.v1.model.SearchResult;
import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.Query;
import com.iqser.red.service.search.v1.server.service.SearchService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
import io.micrometer.core.annotation.Timed;
import io.micrometer.core.instrument.util.StringUtils;
import jakarta.json.JsonObject;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class SearchServiceImpl implements SearchService {
private final OpensearchClientCache clientCache;
@Timed("redactmanager_search")
public SearchResult search(String queryString,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
int page,
int pageSize,
boolean returnSections) {
Query query = QueryStringConverter.convert(queryString);
Map<String, HighlightField> highlightFieldMap = new HashMap<>();
highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
highlightFieldMap.put("filename", new HighlightField.Builder().build());
highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());
SearchRequest request = new SearchRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.query(convertQuery(query,
dossierTemplateIds,
dossierIds,
fileId,
assignee,
includeDeletedDossiers,
includeArchivedDossiers,
workflowStatus,
fileAttributes,
returnSections))
.from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
.size(getPageSizeOrDefault(pageSize))
.source(s -> s.filter(f -> f.includes("dossierId",
"dossierTemplateId",
"dossierDeleted",
"dossierArchived",
"filename",
"fileId",
"assignee",
"dossierStatus",
"workflowStatus",
"fileAttributes")))
.highlight(h -> h.type(HighlighterType.of(b -> b.builtin(BuiltinHighlighterType.FastVector))).fields(highlightFieldMap))
.trackScores(true)
.build();
SearchResponse response = execute(request);
return convert(response, query);
}
protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {
try {
return clientCache.getClient().search(searchRequest, IndexDocument.class);
} catch (IOException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.searchFailed(e);
}
}
private org.opensearch.client.opensearch._types.query_dsl.Query convertQuery(Query query,
List<String> dossierTemplateIds,
List<String> dossierIds,
String fileId,
String assignee,
boolean includeDeletedDossiers,
boolean includeArchivedDossiers,
String workflowStatus,
Map<String, String> fileAttributes,
boolean returnSections) {
var entireQuery = QueryBuilders.bool();
var sectionsQueries = QueryBuilders.bool();
for (String must : query.getMusts()) {
var textPhraseQuery = QueryBuilders.matchPhrase().field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must).build()._toQuery();
var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix().field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must).build()._toQuery();
var fileAttributesPhraseQuery = QueryBuilders.matchPhrase()
.field("fileAttributes.value")
.query(must.toLowerCase(Locale.ROOT))
.queryName("fileAttributes." + must)
.build()
._toQuery();
var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
entireQuery.must(filenameOrTextMustQuery);
sectionsQueries.should(textPhraseQuery);
}
for (String should : query.getShoulds()) {
var textTermQuery = QueryBuilders.matchPhrase().field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should).build()._toQuery();
var filenameTermQuery = QueryBuilders.matchPhrasePrefix().field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should).build()._toQuery();
var fileAttributesPhraseQuery = QueryBuilders.matchPhrase()
.field("fileAttributes.value")
.query(should.toLowerCase(Locale.ROOT))
.queryName("fileAttributes." + should)
.build()
._toQuery();
entireQuery.should(textTermQuery);
entireQuery.should(filenameTermQuery);
entireQuery.should(fileAttributesPhraseQuery);
sectionsQueries.should(textTermQuery);
}
if (returnSections) {
var nestedQuery = QueryBuilders.nested()
.scoreMode(ChildScoreMode.Avg)
.queryName("sections")
.query(sectionsQueries.build()._toQuery())
.path("sections")
.innerHits(i -> i.size(100))
.build()
._toQuery();
entireQuery.should(nestedQuery);
}
var filterQuery = QueryBuilders.bool();
if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {
var dossierTemplateIdQueryBuilder = QueryBuilders.bool();
for (var dossierTemplateId : dossierTemplateIds) {
if (StringUtils.isNotEmpty(dossierTemplateId)) {
dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match()
.field("dossierTemplateId")
.query(q -> q.stringValue(dossierTemplateId))
.build()
._toQuery());
}
}
filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
}
if (dossierIds != null && !dossierIds.isEmpty()) {
var dossierIdQueryBuilder = QueryBuilders.bool();
for (var dossierId : dossierIds) {
if (StringUtils.isNotEmpty(dossierId)) {
dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match().field("dossierId").query(q -> q.stringValue(dossierId)).build()._toQuery());
}
}
filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
}
if (StringUtils.isNotEmpty(fileId)) {
filterQuery.must(QueryBuilders.match().field("fileId").query(q -> q.stringValue(fileId)).build()._toQuery());
}
if (StringUtils.isNotEmpty(assignee)) {
filterQuery.must(QueryBuilders.match().field("assignee").query(q -> q.stringValue(assignee)).build()._toQuery());
}
if (includeArchivedDossiers) {
filterQuery.must(QueryBuilders.terms()
.field("dossierArchived")
.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))
.build()
._toQuery());
} else {
filterQuery.must(QueryBuilders.terms().field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build()))).build()._toQuery());
}
if (includeDeletedDossiers) {
filterQuery.must(QueryBuilders.terms()
.field("dossierDeleted")
.terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))
.build()
._toQuery());
} else {
filterQuery.must(QueryBuilders.terms().field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build()))).build()._toQuery());
}
if (StringUtils.isNotEmpty(workflowStatus)) {
filterQuery.must(QueryBuilders.match().field("workflowStatus").query(q -> q.stringValue(workflowStatus)).build()._toQuery());
}
if (fileAttributes != null && !fileAttributes.isEmpty()) {
var fileAttributesQueryBuilder = QueryBuilders.bool();
for (var fileAttributeKey : fileAttributes.keySet()) {
if (StringUtils.isNotEmpty(fileAttributeKey)) {
fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
.must(QueryBuilders.match().field("fileAttributes.name").query(q -> q.stringValue(fileAttributeKey)).build()._toQuery())
.must(QueryBuilders.match().field("fileAttributes.value").query(q -> q.stringValue(fileAttributes.get(fileAttributeKey))).build()._toQuery())
.build()
._toQuery()));
}
}
filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
}
return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
}
private SearchResult convert(SearchResponse response, Query query) {
List<Hit> hits = response.hits().hits();
return SearchResult.builder()
.matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
.maxScore(response.maxScore() == null ? 0 : response.maxScore().floatValue())
.total(response.hits().total().value())
.build();
}
private MatchedDocument convertSearchHit(Hit hit, Query query) {
List<String> m = hit.matchedQueries();
Set<String> matchesTerms = m.stream()
.map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
.map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
.collect(Collectors.toSet());
Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());
IndexDocument indexDocument = (IndexDocument) hit.source();
MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
.score(hit.score().floatValue())
.dossierId(indexDocument.getDossierId())
.dossierTemplateId(indexDocument.getDossierTemplateId())
.fileId(indexDocument.getFileId())
.assignee(indexDocument.getAssignee())
.fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
.workflowStatus(indexDocument.getWorkflowStatus())
.fileName(indexDocument.getFilename())
.dossierDeleted(indexDocument.isDossierDeleted())
.dossierArchived(indexDocument.isDossierArchived())
.highlights(hit.highlight())
.matchedTerms(matchesTerms)
.unmatchedTerms(unmatchedTerms);
if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
.containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
}
return matchedDocumentBuilder.build();
}
private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {
Map<String, String> fileAttributes = new HashMap<>();
if (fileAttributesSourceMap != null) {
List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
}
return fileAttributes;
}
private MatchedSection convertInnerHit(Hit<JsonData> hit) {
JsonObject indexSection = hit.source().toJson().asJsonObject();
var jsonArray = indexSection.getJsonArray("pages");
var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());
return MatchedSection.builder()
.headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
.sectionNumber(indexSection.getString("sectionNumber"))
.pages(pages)
.matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
.build();
}
private int getPageSizeOrDefault(int pageSize) {
if (pageSize <= 0) {
return 10;
}
return pageSize;
}
private int getPageOrDefault(int page) {
if (page < 0) {
return 0;
}
return page;
}
}

View File

@ -1,16 +0,0 @@
package com.iqser.red.service.search.v1.server.utils;
import lombok.experimental.UtilityClass;
@UtilityClass
public class IndexNameHelper {
private static final String SEARCH_INDEX = "%s_search";
public String getSearchIndex(String indexPrefix) {
return String.format(SEARCH_INDEX, indexPrefix);
}
}
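A one-line sanity check of the helper above; "tenant1" is a placeholder prefix:

String index = IndexNameHelper.getSearchIndex("tenant1"); // -> "tenant1_search"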

View File

@ -1,5 +0,0 @@
server:
port: 8099
persistence-service.url: "http://localhost:8085"
tenant-user-management-service.url: "http://localhost:8091/tenant-user-management/internal"

View File

@ -2,28 +2,11 @@ info:
description: Search Service Server V1
persistence-service.url: "http://persistence-service-v1:8080"
tenant-user-management-service.url: "http://tenant-user-management-service:8080/internal"
fforesight:
tenants.remote: true
tenant-exchange.name: 'tenants-exchange'
logging.pattern.level: "%5p [${spring.application.name},%X{traceId:-},%X{spanId:-}]"
logging.type: ${LOGGING_TYPE:CONSOLE}
kubernetes.namespace: ${NAMESPACE:default}
project.version: 1.0-SNAPSHOT
server:
port: 8080
lifecycle:
base-package: com.iqser.red.service.search
spring:
application:
name: search-service
main:
allow-circular-references: true # FIXME
profiles:
@ -50,11 +33,9 @@ management:
health.enabled: true
endpoints.web.exposure.include: prometheus, health
metrics.export.prometheus.enabled: ${monitoring.enabled:false}
tracing:
enabled: ${TRACING_ENABLED:false}
sampling:
probability: ${TRACING_PROBABILITY:1.0}
otlp:
tracing:
endpoint: ${OTLP_ENDPOINT:http://otel-collector-opentelemetry-collector.otel-collector:4318/v1/traces}
search:
backend: elasticsearch
storage:
backend: 's3'
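Note that this search.backend value is what the @ConditionalOnProperty(prefix = "search", name = "backend") guards throughout this diff switch on; with the OpenSearch implementations removed in this changeset, elasticsearch appears to be the only backend still wired up.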

View File

@ -1,11 +1,5 @@
{
"properties": {
"dossierId": {
"type": "keyword"
},
"dossierTemplateId": {
"type": "keyword"
},
"fileId": {
"type": "keyword"
},
@ -14,21 +8,9 @@
"term_vector": "with_positions_offsets",
"analyzer": "filename_analyzer"
},
"date": {
"type": "date"
},
"assignee": {
"sectionNumber": {
"type": "keyword"
},
"workflowStatus": {
"type": "keyword"
},
"dossierArchived": {
"type": "boolean"
},
"dossierDeleted": {
"type": "boolean"
},
"fileAttributes": {
"type": "nested",
"include_in_parent": true,
@ -42,24 +24,21 @@
}
}
},
"sections": {
"type": "nested",
"include_in_parent": true,
"properties": {
"headline": {
"type": "text"
},
"sectionNumber": {
"type": "keyword"
},
"pages": {
"type": "keyword"
},
"text": {
"type": "text",
"term_vector": "with_positions_offsets"
}
}
"headline": {
"type": "text"
},
"pages": {
"type": "keyword"
},
"text": {
"type": "text",
"term_vector": "with_positions_offsets"
},
"wordEmbeddingsVector": {
"type": "dense_vector",
"dims": 384,
"index": true,
"similarity": "cosine"
}
}
}
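The new wordEmbeddingsVector mapping (a 384-dimensional dense_vector indexed with cosine similarity) is what enables the dense-vector similarity this changeset exercises. As a rough illustration, a query against it could look as follows with the elasticsearch-java client; this is a sketch only, assuming the client's top-level kNN search support, with embed(...) and the index name as hypothetical placeholders:

// Sketch: approximate kNN search on the new dense_vector field.
// Assumptions: an elasticsearch-java 8.x ElasticsearchClient (esClient), a hypothetical
// embed(...) helper producing vectors from the same model used at indexing time,
// and "indexprefix_search" as a placeholder index name.
List<Float> queryVector = embed("data protection clause");
SearchRequest request = new SearchRequest.Builder()
.index("indexprefix_search")
.knn(k -> k.field("wordEmbeddingsVector")
.queryVector(queryVector)
.k(10) // nearest sections to return
.numCandidates(100)) // per-shard candidate pool; higher improves recall at some latency cost
.source(s -> s.filter(f -> f.includes("fileId", "sectionNumber", "headline")))
.build();
SearchResponse<IndexDocument> response = esClient.search(request, IndexDocument.class);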

View File

@ -1,17 +0,0 @@
<configuration>
<springProperty scope="configuration" name="logType" source="logging.type"/>
<springProperty scope="context" name="application.name" source="spring.application.name"/>
<springProperty scope="context" name="version" source="project.version"/>
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
<include resource="org/springframework/boot/logging/logback/console-appender.xml"/>
<appender name="JSON" class="ch.qos.logback.core.ConsoleAppender">
<encoder class="net.logstash.logback.encoder.LogstashEncoder"/>
</appender>
<root level="INFO">
<appender-ref ref="${logType}"/>
</root>
</configuration>

View File

@ -6,12 +6,9 @@ import java.util.Set;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
import org.springframework.boot.test.autoconfigure.actuate.observability.AutoConfigureObservability;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.cloud.openfeign.EnableFeignClients;
@ -20,7 +17,6 @@ import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.context.annotation.Primary;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
@ -28,24 +24,20 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import org.testcontainers.utility.DockerImageName;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.TenantResponse;
import com.iqser.red.service.search.v1.server.Application;
import com.iqser.red.service.search.v1.server.client.TenantsClient;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.storage.commons.StorageAutoConfiguration;
import com.iqser.red.storage.commons.service.StorageService;
import com.iqser.red.storage.commons.utils.FileSystemBackedStorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
@ComponentScan
@ExtendWith(SpringExtension.class)
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractElasticsearchIntegrationTest.WAIT_FOR_WRITE_REQUESTS, AbstractElasticsearchIntegrationTest.SEARCH_BACKEND})
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractElasticsearchIntegrationTest.SEARCH_BACKEND})
@ContextConfiguration(initializers = {AbstractElasticsearchIntegrationTest.Initializer.class})
@EnableFeignClients(basePackageClasses = AbstractElasticsearchIntegrationTest.TestConfiguration.class)
@DirtiesContext
@AutoConfigureObservability
@SuppressWarnings("PMD")
public abstract class AbstractElasticsearchIntegrationTest {
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=wait_for";
@ -54,12 +46,6 @@ public abstract class AbstractElasticsearchIntegrationTest {
@MockBean
private TenantsClient tenantsClient;
@MockBean
private RabbitAdmin rabbitAdmin;
@MockBean
private RabbitListenerEndpointRegistry rabbitListenerEndpointRegistry;
private static int port;
@ -68,15 +54,8 @@ public abstract class AbstractElasticsearchIntegrationTest {
TenantContext.setTenantId("redaction");
when(tenantsClient.getTenant("redaction")).thenReturn(TenantResponse.builder()
.searchConnection(SearchConnection.builder()
.hosts(Set.of("localhost"))
.port(port)
.scheme("http")
.numberOfShards("1")
.numberOfReplicas("5")
.indexPrefix("indexprefix")
.build())
.build());
.searchConnection(SearchConnection.builder().hosts(Set.of("localhost")).port(port).scheme("http").numberOfShards("1").numberOfReplicas("5").build())
.build());
}
@ -84,7 +63,7 @@ public abstract class AbstractElasticsearchIntegrationTest {
public void initialize(ConfigurableApplicationContext configurableApplicationContext) {
var esContainer = new ElasticsearchContainer(DockerImageName.parse("elasticsearch:8.6.2").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch"));
var esContainer = new ElasticsearchContainer(DockerImageName.parse("elasticsearch:8.6.0").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch"));
esContainer.getEnvMap().put("xpack.security.enabled", "false");
esContainer.start();
@ -98,15 +77,14 @@ public abstract class AbstractElasticsearchIntegrationTest {
protected StorageService storageService;
@Configuration
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
@ComponentScan(excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = StorageAutoConfiguration.class)})
@EnableAutoConfiguration(exclude = {StorageAutoConfiguration.class, RabbitAutoConfiguration.class})
public static class TestConfiguration {
@Bean
@Primary
public StorageService inmemoryStorage(ObjectMapper objectMapper) {
public StorageService inmemoryStorage() {
return new FileSystemBackedStorageService(objectMapper);
return new FileSystemBackedStorageService();
}
}

View File

@ -18,22 +18,19 @@ import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.FilterType;
import org.springframework.context.annotation.Primary;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.testcontainers.utility.DockerImageName;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.TenantResponse;
import com.iqser.red.service.search.v1.server.Application;
import com.iqser.red.service.search.v1.server.client.TenantsClient;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.storage.commons.StorageAutoConfiguration;
import com.iqser.red.storage.commons.service.StorageService;
import com.iqser.red.storage.commons.utils.FileSystemBackedStorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
@ComponentScan
@ExtendWith(SpringExtension.class)
@ -41,7 +38,6 @@ import com.knecon.fforesight.tenantcommons.model.TenantResponse;
@ContextConfiguration(initializers = {AbstractOpensearchIntegrationTest.Initializer.class})
@EnableFeignClients(basePackageClasses = AbstractOpensearchIntegrationTest.TestConfiguration.class)
@DirtiesContext
@SuppressWarnings("PMD")
public abstract class AbstractOpensearchIntegrationTest {
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=wait_for";
@ -58,14 +54,7 @@ public abstract class AbstractOpensearchIntegrationTest {
TenantContext.setTenantId("redaction");
when(tenantsClient.getTenant("redaction")).thenReturn(TenantResponse.builder()
.searchConnection(SearchConnection.builder()
.hosts(Set.of("localhost"))
.port(port)
.scheme("http")
.numberOfShards("1")
.numberOfReplicas("5")
.indexPrefix("indexprefix")
.build())
.searchConnection(SearchConnection.builder().hosts(Set.of("localhost")).port(port).scheme("http").numberOfShards("1").numberOfReplicas("5").build())
.build());
}
@ -88,15 +77,14 @@ public abstract class AbstractOpensearchIntegrationTest {
protected StorageService storageService;
@Configuration
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
@ComponentScan(excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = StorageAutoConfiguration.class)})
@EnableAutoConfiguration(exclude = {StorageAutoConfiguration.class, RabbitAutoConfiguration.class})
public static class TestConfiguration {
@Bean
@Primary
public StorageService inmemoryStorage(ObjectMapper objectMapper) {
public StorageService inmemoryStorage() {
return new FileSystemBackedStorageService(objectMapper);
return new FileSystemBackedStorageService();
}
}

View File

@ -0,0 +1,121 @@
package com.iqser.red.service.search.v1.server.service;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.springframework.core.io.InputStreamResource;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.commons.jackson.ObjectMapperFactory;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
import com.iqser.red.storage.commons.service.StorageService;
import lombok.SneakyThrows;
public class FileSystemBackedStorageService implements StorageService {
private final Map<String, File> dataMap = new HashMap<>();
public FileSystemBackedStorageService() {
}
@SneakyThrows
@Override
public InputStreamResource getObject(String tenantId, String objectId) {
var res = dataMap.get(objectId);
if (res == null) {
throw new StorageObjectDoesNotExist(new RuntimeException());
}
return new InputStreamResource(new FileInputStream(res));
}
@Override
public void deleteObject(String tenantId, String objectId) {
dataMap.remove(objectId);
}
@Override
public boolean objectExists(String tenantId, String objectId) {
return dataMap.containsKey(objectId);
}
@Override
@SneakyThrows
public <T> void storeJSONObject(String tenantId, String objectId, T any) {
File tempFile = File.createTempFile("test", ".tmp");
getMapper().writeValue(new FileOutputStream(tempFile), any);
dataMap.put(objectId, tempFile);
}
private ObjectMapper getMapper() {
return ObjectMapperFactory.create();
}
@Override
@SneakyThrows
public <T> T readJSONObject(String tenantId, String objectId, Class<T> clazz) {
if (dataMap.get(objectId) == null || !dataMap.get(objectId).exists()) {
throw new StorageObjectDoesNotExist("Stored object not found");
}
return getMapper().readValue(new FileInputStream(dataMap.get(objectId)), clazz);
}
public List<String> listPaths() {
return new ArrayList<>(dataMap.keySet());
}
public List<String> listFilePaths() {
return dataMap.values().stream().map(File::getAbsolutePath).collect(Collectors.toList());
}
@Override
@SneakyThrows
public void storeObject(String tenantId, String objectId, InputStream stream) {
File tempFile = File.createTempFile("test", ".tmp");
try (var fileOutputStream = new FileOutputStream(tempFile)) {
IOUtils.copy(stream, fileOutputStream);
}
dataMap.put(objectId, tempFile);
}
public void clearStorage() {
this.dataMap.forEach((k, v) -> {
v.delete();
});
this.dataMap.clear();
}
}
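
A quick usage sketch of the stub (the tenant and object IDs are arbitrary examples):

var storage = new FileSystemBackedStorageService();
storage.storeJSONObject("redaction", "dossier-1/file-1", java.util.Map.of("key", "value"));
var roundTripped = storage.readJSONObject("redaction", "dossier-1/file-1", java.util.Map.class);
// roundTripped now equals {key=value} and objectExists(...) reports true
storage.clearStorage(); // deletes the temp files and empties the map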

View File

@ -1,80 +0,0 @@
package com.iqser.red.service.search.v1.server.service;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import com.iqser.red.service.persistence.service.v1.api.shared.model.index.IndexInformation;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
import lombok.SneakyThrows;
public class IndexTest extends AbstractElasticsearchIntegrationTest {
@Autowired
private IndexInformationService indexInformationService;
@MockBean
private IndexInformationClient indexInformationClient;
@MockBean
private FileStatusClient fileStatusClient;
@MockBean
private IndexingMessageReceiver indexingMessageReceiver;
@Test
@SneakyThrows
public void testGenerateHash() {
// Act
String hash = indexInformationService.generateIndexConfigurationHash();
// Assert
System.out.println(hash);
Assertions.assertNotNull(hash);
}
@Test
@SneakyThrows
public void testHashChanged() {
// Arrange
IndexInformation indexInformation = IndexInformation.builder().indexConfigurationHash("Some Hash").build();
when(indexInformationClient.getIndexInformation()).thenReturn(indexInformation);
// Act and Assert
Assertions.assertTrue(indexInformationService.hasIndexChanged());
}
@Test
@SneakyThrows
public void testHashChangedNot() {
// Arrange
String hash = indexInformationService.generateIndexConfigurationHash();
IndexInformation indexInformation = IndexInformation.builder().indexConfigurationHash(hash).build();
when(indexInformationClient.getIndexInformation()).thenReturn(indexInformation);
// Act and Assert
Assertions.assertFalse(indexInformationService.hasIndexChanged());
}
@Test
@SneakyThrows
public void testHashDoesNotExist() {
// Arrange
when(indexInformationClient.getIndexInformation()).thenReturn(null);
// Act and Assert
Assertions.assertTrue(indexInformationService.hasIndexChanged());
}
}
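
The three hash assertions above pin down the contract of hasIndexChanged(); read together they imply something like the following (an inference from the deleted tests, not the service's actual code):

// Inferred contract: the index counts as changed when no IndexInformation is
// stored yet, or when the stored hash differs from a freshly generated one.
public boolean hasIndexChanged() {
    IndexInformation stored = indexInformationClient.getIndexInformation();
    return stored == null
            || !generateIndexConfigurationHash().equals(stored.getIndexConfigurationHash());
}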

View File

@ -1,93 +0,0 @@
package com.iqser.red.service.search.v1.server.service;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.Test;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.core.io.ClassPathResource;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.iqser.red.service.search.v1.model.SearchRequest;
import com.iqser.red.service.search.v1.server.client.DossierClient;
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
import com.iqser.red.service.search.v1.server.controller.SearchController;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
import com.iqser.red.service.search.v1.server.service.utils.MetricValidationUtils;
import com.knecon.fforesight.tenantcommons.TenantContext;
import io.micrometer.prometheus.PrometheusMeterRegistry;
import lombok.SneakyThrows;
public class MetricsIntegrationTest extends AbstractElasticsearchIntegrationTest {
@Autowired
private ObjectMapper objectMapper;
@Autowired
private IndexingMessageReceiver indexingMessageReceiver;
@Autowired
private PrometheusMeterRegistry prometheusMeterRegistry;
@Autowired
private SearchController searchController;
@MockBean
private FileStatusClient fileStatusClient;
@MockBean
private DossierClient dossierClient;
@MockBean
private FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
@MockBean
private IndexInformationClient indexInformationClient;
@MockBean
private RabbitTemplate rabbitTemplate;
@Test
@SneakyThrows
public void testMetrics() {
var dossier = new Dossier();
dossier.setId("1");
when(dossierClient.getDossierById("1", true, true)).thenReturn(dossier);
var file = new FileModel();
file.setDossierId("1");
file.setId("1");
file.setWorkflowStatus(WorkflowStatus.NEW);
when(fileStatusClient.getFileStatus("1", "1")).thenReturn(file);
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
storageService.storeJSONObject(TenantContext.getTenantId(), TextStorageService.StorageIdUtils.getStorageId("1", "1", FileType.SIMPLIFIED_TEXT), text);
IndexMessage indexRequest = new IndexMessage();
indexRequest.setDossierId("1");
indexRequest.setFileId("1");
indexRequest.setDossierTemplateId("1");
indexRequest.setMessageType(IndexMessageType.INSERT);
indexingMessageReceiver.receiveIndexingRequest(indexRequest);
indexRequest.setMessageType(IndexMessageType.UPDATE);
indexingMessageReceiver.receiveIndexingRequest(indexRequest);
searchController.getDossierStatus(SearchRequest.builder().queryString("test").build());
MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_indexDocument", 1, null);
MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_updateDocument", 1, null);
MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_getTextSearchService", 1, null);
MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_search", 1, null);
}
}
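
MetricValidationUtils.validateMetric itself is not part of this diff; a sketch of what such a helper could do with the PrometheusMeterRegistry (an assumption, and it presumes the meters are Micrometer timers):

public static void validateMetric(PrometheusMeterRegistry registry, String name,
                                  long expectedCount, String tagFilter) {
    // registry.get(...) throws MeterNotFoundException if no meter matches;
    // tagFilter (null in the test above) is ignored in this sketch.
    long actual = registry.get(name).timer().count();
    org.junit.jupiter.api.Assertions.assertEquals(expectedCount, actual);
}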

View File

@ -1,17 +1,9 @@
ribbon:
ConnectTimeout: 600000
ReadTimeout: 600000
logging.type: ${LOGGING_TYPE:CONSOLE}
logging.level.root: INFO
fforesight:
tenant-exchange:
name: 'tenants-exchange'
spring:
main:
allow-bean-definition-overriding: true
@ -29,8 +21,3 @@ management:
metrics.export.prometheus.enabled: true
persistence-service.url: 'http://mock.url'
server:
port: 19547
POD_NAME: search-service
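
The tenant-exchange name kept above is a plain Spring property; a minimal sketch of how it could be consumed (the consuming field is an assumption, not shown in this diff):

// Hypothetical consumer of the property kept in this config:
@Value("${fforesight.tenant-exchange.name}")
private String tenantExchangeName; // resolves to 'tenants-exchange' here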

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,373 @@
{
"sectionTexts": [
{
"text": "European Commission",
"sectionNumber": 1
},
{
"text": "Addendum VOLUME 2 Abamectin Rapporteur Member State: The Netherlands April 2015 February 2016 Draft Assessment Report and Proposed decision of the Netherlands prepared in the context of the possible extension of the approval conditions of abamectin under Regulation (EC) 1107/2009 Abamectin Volume 2 2",
"sectionNumber": 2
},
{
"text": "Date Version history April 2015 Initial version February 2015 Revised addendum to DAR in light of comments and additional information received",
"sectionNumber": 3
},
{
"text": "Version history page Abamectin Volume 2 3",
"sectionNumber": 4
},
{
"text": "TABLE OF CONTENTS VOLUME 1 A. List of the tests, studies and information submitted ........................................................................ 4 A.1 Identity ......................................................................................................................................... 4 A.2 Physical and chemical properties ............................................................................................... 4 A.3 Data on application and efficacy ................................................................................................. 6 A.4 Further information .................................................................................................................... 10 A.5 Methods of analysis .................................................................................................................. 10 A.6 Toxicology and metabolism data .............................................................................................. 11 A.7 Residue data ............................................................................................................................. 13 A.8 Environmental fate and behaviour ............................................................................................ 15 A.9 Ecotoxicology data .................................................................................................................... 16 Abamectin Volume 2 4",
"sectionNumber": 5
},
{
"text": "A. List of the tests, studies and information submitted A.1 Identity Refer to Volume 4.",
"sectionNumber": 6
},
{
"text": "Data point Author(s) Year Title Company Report No. Source (where different from company) GLP or GEP status Published or not Vertebrate study Y/N Data protection claimed Y/N Owner",
"sectionNumber": 7
},
{
"text": "KIIIA1 2.1 / 01 & KIIIA1 2.4.1 / 01 & KIIIA1 2.4.2 / 01 & KIIIA1 2.5.2 / 01 & KIIIA1 2.5.3 / 01 & KIIIA1 2.6.1 / 01 & KIIIA1 2.8.6.1 / 01 Kalt R. 2009 A12115I - Physical properties of batch SMU9EP004 Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 120109 GLP, not published Syngenta File No A12115I_10005 N Y SYN",
"sectionNumber": 8
},
{
"text": "KIIIA1 2.2.1 / 01 & KIIIA1 2.2.2 / 01 & KIIIA1 2.3.1 / 01 & KIIIA1 2.3.3 / 01 Jackson W. 2009 A12115I - Physical and chemical properties Syngenta Syngenta Technology & Projects, Huddersfield, United Kingdom, HT09/241 GLP, not published Syngenta File No A12115I_10013 N Y SYN",
"sectionNumber": 9
},
{
"text": "KIIIA1 2.7.1 / 01 Kalt R. 2009a A12115I - Storage stability and shelf life statement (2 weeks 54°C) in packaging made of HDPE Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, Not GLP, not published Syngenta File No A12115I_10009 N N SYN",
"sectionNumber": 10
},
{
"text": "KIIIA1 2.7.1 / 02 Kalt R. 2009b A12115I - Storage stability and shelf life statement (2 weeks 54°C) in packaging made of PET Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, Not GLP, not published Syngenta File No A12115I_10010 N N SYN",
"sectionNumber": 11
},
{
"text": "KIIIA1 Kalt R. 2009c A12115I - Technical properties of batch N N SYN",
"sectionNumber": 12
},
{
"text": "2.7.4 / 01 & KIIIA1 2.8.2 / 01 & KIIIA1 2.8.3.1 / 01 & KIIIA1 2.8.3.2 / 01 & KIIIA1 2.8.5.2 / 01 & KIIIA1 2.8.8.2 / 01 SMU9EP004 Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 120251 Not GLP, not published Syngenta File No A12115I_10004",
"sectionNumber": 14
},
{
"text": "KIIIA1 2.7.5 / 01 & KIIIA1 4.1.3 / 01 Kalt R. 2011 A12115I - Storage stability and shelf life statement (2 years 25°C) in packaging made of HDPE Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 10478696 Not GLP, not published Syngenta File No A12115I_10039 N N SYN",
"sectionNumber": 15
},
{
"text": "KIIIA1 2.7.5 / 02 & KIIIA1 4.1.3 / 02 Kalt R. 2011a A12115I - Storage stability and shelf life statement (2 years 25°C) in packaging made of PET Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 10478743 Not GLP, not published Syngenta File No A12115I_10040 N N SYN",
"sectionNumber": 16
},
{
"text": "KIIIA1 3.3.1 / 01 Briswalter C. 2011 A12115I GAP Syngenta Crop Protection AG, Basel, Switzerland, Not GLP, not published Syngenta File No A12115I_10042 N N SYN",
"sectionNumber": 17
},
{
"text": "A.2 Physical and chemical properties Abamectin Volume 2 5 Abamectin Volume 2 6",
"sectionNumber": 19
},
{
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner SYN = Syngenta",
"sectionNumber": 20
},
{
"text": "KIIIA1 6/01 Ivacic D. 2012 A12115I (Tervigo) for nematodes on protected vegetables Syngenta - No Unublished Regulatory document No. A12115I_10165 Y SYN",
"sectionNumber": 21
},
{
"text": "",
"sectionNumber": 23
},
{
"text": "Abamectin Volume 2 KIIIA1 6.1.2/001 & KIIIA1 6.1.2/006 & KIIIA1 6.1.3/001 Rafael Munoz 2009 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on Y SYN",
"sectionNumber": 24
},
{
"text": "Abamectin Volume 2 KIIIA1 6.1.2/001 & KIIIA1 6.1.2/006 & KIIIA1 6.1.3/001 vegetables for the control of nematodes Syngenta CP Spain Syngenta Agro SA, ESSEZN2172009 GEP Unpublished",
"sectionNumber": 25
},
{
"text": "KIIIA1 6.1.2/002 & KIIIA1 6.1.3/002 Pedro Vega 2009 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of nematodes Syngenta CP Spain Syngenta Agro SA, ESSEZN3172009 GEP Unpublished Y SYN",
"sectionNumber": 26
},
{
"text": "KIIIA1 6.1.2/003 & KIIIA1 6.1.2/015 & KIIIA1 6.1.3/003 & KIIIA1 6.1.3/033 Sotiris Pantazis 2009 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of nematodes Syngenta CP Greece Anadiag Hellas, GR45ZN1232009 GEP Unpublished Y SYN",
"sectionNumber": 27
},
{
"text": "KIIIA1 6.1.2/004 & KIIIA1 6.1.3/004 Antonio Aversa 2009 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of nematodes Syngenta CP Italy Syngenta Crop Protection S.r.l., ITSOZN0522009 GEP Unpublished Y SYN",
"sectionNumber": 28
},
{
"text": "KIIIA1 6.1.2/005 & KIIIA1 6.1.3/005 & KIIIA1 6.1.3/011 Pedro Pablo Japón Fu 2009 Terra Nova - registration trials on fruiting vegetables Syngenta CP Spain Phytest Hispania S.l. ESPHZN0202009 GEP Unpublished Y SYN",
"sectionNumber": 29
},
{
"text": "KIIIA1 6.1.2/007 & KIIIA1 6.1.3/017 Ana Orrico Marín 2009 Terra Nova - registration trials on fruiting vegetables Syngenta CP Spain Sicop Spain, ESSCZN0422009 GEP Unpublished Y SYN",
"sectionNumber": 30
},
{
"text": "KIIIA1 6.1.2/008 & KIIIA1 6.1.3/019 Rafael Munoz 2009a 7 Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of nematodes Syngenta CP Spain Syngenta Agro SA, ESSEZN2142009 GEP Unpublished Y SYN",
"sectionNumber": 31
},
{
"text": "nematodes Syngenta CP Spain Syngenta Agro SA, ESSEZN2162009 GEP Unpublished",
"sectionNumber": 33
},
{
"text": "KIIIA1 6.1.3/014 & KIIIA1 6.1.3/027 Pedro Pablo Japón Fu 2011 Tervigo - registration trials on eggplant Syngenta CP Spain Phytest Hispania S.l. ESPHZN0012011 GEP Unpublished Y SYN",
"sectionNumber": 34
},
{
"text": "KIIIA1 6.1.3/015 Michele Derrico 2011 Tervigo - registration trials on eggplant Syngenta CP Italy Syngenta Crop Protection S.r.l., ITFGZN3302011 GEP Unpublished Y SYN",
"sectionNumber": 35
},
{
"text": "KIIIA1 6.1.3/016 Asero Giuseppe 2011 Tervigo - registration trials on eggplant Syngenta CP Italy Syngenta Crop Protection S.r.l., ITSOZN1412011 GEP Unpublished Y SYN",
"sectionNumber": 36
},
{
"text": "KIIIA1 6.1.3/018 Asero Giuseppe 2011a Tervigo - registration trials on peppers Syngenta CP Italy Syngenta Crop Protection S.r.l., ITSOZN2702011 GEP Unpublished Y SYN",
"sectionNumber": 37
},
{
"text": "KIIIA1 6.1.3/024 Rafael Munoz 2010 Terranova - registration trials on fruiting vegetables Syngenta CP Spain Syngenta Agro SA, ESSEZN2252010 GEP Unpublished Y SYN",
"sectionNumber": 38
},
{
"text": "KIIIA1 6.1.3/025 Adriano Giansante 2010 Terranova - registration trials on fruiting vegetables Syngenta CP Italy Syngenta Crop Protection S.r.l., ITCEZN2212010 GEP Unpublished Y SYN",
"sectionNumber": 39
},
{
"text": "KIIIA1 6.1.3/026 Rosario D'Asero 2010 8 Terranova - registration trials on fruiting vegetables Syngenta CP Italy Syngenta Crop Protection S.r.l., ITSOZN2222010 GEP Unpublished Y SYN",
"sectionNumber": 40
},
{
"text": "A.3 Data on application and efficacy Abamectin Volume 2 KIIIA1 6.1.3/012 Rafael Munoz 2009d Abamectin - Fe chelate - Delivery of BAD- Efficacy and selectivity registration trials on vegetables for the control of Y SYN Abamectin Volume 2 9 Abamectin Volume 2 10",
"sectionNumber": 42
},
{
"text": "Data point Author(s) Year Title Company Report No. Source (where different from company) GLP or GEP status Published or not Vertebrate study Y/N Data protection claimed Y/N Owner",
"sectionNumber": 43
},
{
"text": "IIIA 4.2.2/01 Kalt, R 2009d A12115I - The effectiveness of the spray tank cleaning procedure Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 120365 Not GLP, not published Syngenta File No A12115I_10012 N N SYN",
"sectionNumber": 44
},
{
"text": "A.4 Further information",
"sectionNumber": 46
},
{
"text": "Data point Author(s) Year Title Company Report No. Source (where different from company) GLP or GEP status Published or not Vertebrate study Y/N Data protection claimed Y/N Owner",
"sectionNumber": 47
},
{
"text": "KIIIA1 5.2.1 / 01 Dos Santos Alves A. 2009 Determination of MK936 in A12115I Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, SF-328/1 Not GLP, not published Syngenta File No A12115I_10017 N N SYN",
"sectionNumber": 48
},
{
"text": "KIIIA1 5.2.1 / 02 Heintz K. 2009 A12115I - Validation of analytical method SF-328/1 Syngenta Syngenta Crop Protection, Münchwilen, Switzerland, 120106 GLP, not published Syngenta File No A12115I_10018 N Y SYN",
"sectionNumber": 49
},
{
"text": "A.5 Methods of analysis Abamectin Volume 2 11",
"sectionNumber": 51
},
{
"text": "Data point Author(s) Year Title Company Report No. Source (where different from company) GLP or GEP status Published or not Vertebrate study Y/N Data protection claimed Y/N Justification if data protection is claimed Owner",
"sectionNumber": 52
},
{
"text": "KIIIA1 7.1.1 / 01 Arcelin G 2009a Abamectin SC (A12115I) - Acute Oral Toxicity Study in the Rat (Up and Down Procedure) Syngenta - Jealotts Hill, Bracknell, United Kingdom RCC Ltd., Füllinsdorf, Switzerland, C31684 GLP, not published Syngenta File No A12115I_10020 Y Y Y SYN",
"sectionNumber": 53
},
{
"text": "KIIIA1 7.1.2 / 01 Arcelin G 2010 Abamectin SC(A12115I) - Acute Toxicity Study in Rats Syngenta - Jealotts Hill, Bracknell, United Kingdom Harlan Laboratories Ltd., 4414 Fullinsdorf, Switzerland, C79856 GLP, not published Syngenta File No A12115I_10021 Y Y Y SYN",
"sectionNumber": 54
},
{
"text": "KIIIA1 7.1.3 / 01 Shaw D 2009 Abamectin SC (A12115I) - Acute 4 Hour (Nose Only) Inhalation Study In The Rat Syngenta - Jealotts Hill, Bracknell, United Kingdom Covance Laboratories, Harrogate, United Kingdom, 8202-064, T000153-09 GLP, not published Syngenta File No A12115I_10011 Y Y Y SYN",
"sectionNumber": 55
},
{
"text": "KIIIA1 7.1.4 / 01 Arcelin G. 2009b Abamectin SC (A121151) - Primary Skin Irritation Study in Rabbits (4 Hour Semi-Occlusive Application) Syngenta RCC Ltd., Füllinsdorf, Switzerland, C46613 GLP, not published Syngenta File No A12115I_10015 Y Y Y SYN",
"sectionNumber": 56
},
{
"text": "KIIIA1 7.1.5 / 01 Arcelin G. 2009c Abamectin SC (A121151) - Primary Eye Irritation Study in Rabbits Y Y Y SYN",
"sectionNumber": 57
},
{
"text": "Syngenta RCC Ltd., Füllinsdorf, Switzerland, C46624 GLP, not published Syngenta File No A12115I_10016",
"sectionNumber": 59
},
{
"text": "KIIIA1 7.1.6 / 01 Arcelin G 2009d Abamectin SC (A12115I) - Contact Hypersensitivty in Albino Guinea Pigs, Buehler Test (9-induction) Syngenta - Jealotts Hill, Bracknell, United Kingdom RCC Ltd., Füllinsdorf, Switzerland, C46635 GLP, not published Syngenta File No A12115I_10019 Y Y Y SYN",
"sectionNumber": 60
},
{
"text": "A.6 Toxicology and metabolism data Abamectin Volume 2 12 Abamectin Volume 2 13",
"sectionNumber": 62
},
{
"text": "OECD data point number / reference number Author(s) Year Title Source (where different from company) Company, Report No GLP or GEP status (where relevant), Published or not Data Protection Claimed Y/N Owner",
"sectionNumber": 63
},
{
"text": "KIIIA1 8.1.1 / 01 Kwiatkowski A., Hill S. 2007 Abamectin - Storage Stability in Crops Stored Deep Frozen for up to Two Years - Final Report Syngenta Crop Protection AG, Basel, Switzerland Syngenta - Jealotts Hill International, Bracknell, Berkshire, United Kingdom, T022438-04-REG 05-S504 GLP, not published Syngenta File No MK936/1798 Y SYN",
"sectionNumber": 64
},
{
"text": "KIIIA1 8.3.1 / 01 Schulz H. 2010a Abamectin - Residue Study on Protected Tomatoes in Germany and the United Kingdom in 2009 Syngenta SGS INSTITUT FRESENIUS GmbH, Im Maisel 14, D-65232 Taunusstein, Germany, T001014- 09-REG GLP, not published Syngenta File No A12115I_10028 Y SYN",
"sectionNumber": 65
},
{
"text": "KIIIA1 8.3.2 / 01 Schulz H. 2010b Abamectin - Residue Study on Protected Pepper in Germany and the United Kingdom in 2009 Syngenta - Jealotts Hill, Bracknell, United Kingdom SGS INSTITUT FRESENIUS GmbH, Im Maisel 14, D-65232 Taunusstein, Germany, T001977- 09-REG GLP, not published Syngenta File No A12115I_10027 Y SYN",
"sectionNumber": 66
},
{
"text": "KIIIA1 8.3.4 / 01 Schulz H 2010c Abamectin - Residue Study on Protected Cucumber in Germany and the United Kingdom in 2009 Syngenta - Jealotts Hill, Bracknell, United Kingdom SGS INSTITUT FRESENIUS GmbH, Im Maisel 14, D-65232 Taunusstein, Germany, T001016- 09-REG GLP, not published Syngenta File No A12115I_10026 Y SYN",
"sectionNumber": 67
},
{
"text": "OECD data point number / reference number Author(s) Year Title Source (where different from company) Company, Report No GLP or GEP status (where relevant), Published or not Data Protection Claimed Y/N Owner",
"sectionNumber": 69
},
{
"text": "KIIIA1 8.3.5 / 01 Schulz H. 2012a Abamectin - Residue Study on Protected Melon in Southern France and Spain in 2011 Syngenta SGS Institut Fresenius GmbH, Taunusstein, Germany, TK0055923-REG GLP, not published Syngenta File No A12115I_10064 Y SYN",
"sectionNumber": 70
},
{
"text": "KIIIA1 8.3.6 / 01 Schulz H. 2012b Abamectin - Residue Study on Protected Beans with Pods (Fresh) in Spain in 2011 Syngenta SGS Institut Fresenius GmbH, Taunusstein, Germany, TK0055927-REG GLP, not published Syngenta File No A12115I_10063 Y SYN",
"sectionNumber": 71
},
{
"text": "A.7 Residue data Abamectin Volume 2 14 Abamectin Volume 2 15",
"sectionNumber": 73
},
{
"text": "Annex point(s) Author(s) Year Title Source Report ID GLP or GEP status Published or not Data Protection Claimed Y/N Owner",
"sectionNumber": 74
},
{
"text": "KIIIA1 9.6.1 / 01 Mason D, Li Z 2010 Abamectin - A Leaching Assessment for Parent and Soil Metabolites NOA448111, NOA448112, NOA457464 and NOA457465 Using the FOCUS Groundwater Scenarios Following Application to Grass Syngenta - Jealotts Hill, Bracknell, United Kingdom Syngenta - Jealotts Hill, Bracknell, United Kingdom, RAJ0775B Not GLP, not published Syngenta File No NOA422601_10007 This is CONFIDENTIAL INFORMATION* N SYN",
"sectionNumber": 75
},
{
"text": "KIIIA1 9.6.1 / 02 Mason D 2010 Abamectin - Predicted Environmental Concentrations in Groundwater for Abamectin using the FOCUS Groundwater Scenarios Following Application to Grass Syngenta - Jealotts Hill, Bracknell, United Kingdom Syngenta - Jealotts Hill, Bracknell, United Kingdom, TK0006924_1 Not GLP, not published Syngenta File No NOA422601_10016 This is CONFIDENTIAL INFORMATION* N SYN",
"sectionNumber": 76
},
{
"text": "KIIIA1 9.6.1 / 03 Wallace D. 2012 Abamectin - Predicted Environmental Concentration of metabolite U8 in ground water Syngenta European Product Registration, Basel, Switzerland, Not GLP, not published Syngenta File No NOA422601_10021 This is CONFIDENTIAL INFORMATION* N SYN",
"sectionNumber": 77
},
{
"text": "KIIIA1 9.6.1 / 04 Carnall J. 2014 Abamectin - A Leaching Assessment for Parent and Soil Metabolites NOA448111, NOA448112, NOA457464 and NOA457465 Using the FOCUS Groundwater Scenarios Following Drip Irrigation Application to Beans and Tomatoes in the EU DRAFT Report Y SYN",
"sectionNumber": 78
},
{
"text": "KIIIA 9.6.2/1 Patterson D. 2014 Abamectin A Fate assessment for Parent using the FOCUS Surface Water Scenarios at Step 3 Following Application to Walk-in Tunnel Cultivated Fruiting Vegetables. Y SYN",
"sectionNumber": 79
},
{
"text": "A.8 Environmental fate and behaviour * Syngenta requests data confidentiality for these data. Disclosure of the information might undermine Syngenta commercial interests by providing access to Syngenta specific know-how used to develop unique positions and approaches to risk assessments Abamectin Volume 2 16",
"sectionNumber": 81
},
{
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
"sectionNumber": 82
},
{
"text": "KIIIA1 10.1.6 / 01 Hubbard P., Beavers J. 2011 Abamectin SC (A12115I) - An acute oral toxicity study with the northern bobwhite using a sequential testing procedure Syngenta Wildlife International Ltd., Easton, Maryland 21601, USA, 528-362 GLP, not published Syngenta File No A12115I_10034 Y SYN",
"sectionNumber": 83
},
{
"text": "KIIIA1 10.2.2.1 / 01 Liedtke A. 2011 Abamectin SC (A12115I) - Acute toxicity to rainbow trout (Oncorhynchus mykiss) in a 96-hour test Syngenta Harlan Laboratories Ltd., Itingen, Switzerland, D36411 GLP, not published Syngenta File No A12115I_10041 Y SYN",
"sectionNumber": 84
},
{
"text": "KIIIA1 10.2.2.2 / 01 Hoger S 2010 Abamectin SC (A12115I) - Acute toxicity to Daphnia magna in a 48-hour immobilization test Syngenta - Jealotts Hill, Bracknell, United Kingdom Harlan Laboratories Ltd., Zelgliweg 1, 4452 Itingen, Switzerland, C86663 GLP, not published Syngenta File No A12115I_10025 Y SYN",
"sectionNumber": 85
},
{
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
"sectionNumber": 87
},
{
"text": "KIIIA1 10.2.2.3 / 01 Liedtke A. 2011a Abamectin SC (A12115I) - Toxicity to Pseudokirchneriella subcapitata in a 96-hour algal growth inhibition test Syngenta Harlan Laboratories Ltd., Itingen, Switzerland, D36398 GLP, not published Syngenta File No A12115I_10038 Y SYN",
"sectionNumber": 88
},
{
"text": "KIIIA1 10.2.3 / 01 Rufli H. 1999 Assessment of the potential biological effects of Abamectin (MK936, 018 EC) (A-8612 A) exposures on aquatic ecosystems as measured in an outdoor microcosm tank system Novartis Crop Protection AG, Basel, Switzerland Novartis Crop Protection AG, Basel, Switzerland, 982570 GLP, not published Syngenta File No MK936/0638 Y SYN",
"sectionNumber": 89
},
{
"text": "KIIIA1 10.2.3 / 02 Knauer K. 2002 Assessment of the Effects of Abamectin 018 EC (A8612A) in Outdoor Microcosms Syngenta Crop Protection AG, Basel, Switzerland Syngenta Crop Protection AG, Basel, Switzerland, 2002590 GLP, not published Syngenta File No MK936/0817 Y SYN",
"sectionNumber": 90
},
{
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
"sectionNumber": 92
},
{
"text": "KIIIA1 10.3.2.1 / 01 Arcelin G. 2009 Abamectin SC (A12115I) - Acute Oral Toxicity Study in the Rat (Up and Down Procedure) Syngenta - Jealotts Hill, Bracknell, United Kingdom RCC Ltd., Füllinsdorf, Switzerland, C31684 GLP, not published Syngenta File No A12115I_10020 Y SYN",
"sectionNumber": 93
},
{
"text": "KIIIA1 10.4.2.1 / 01 Kling A. 2011 Abamectin SC (A12115I) - Acute oral and contact toxicity to the honeybee Apis mellifera L. in the laboratory Syngenta Eurofins Agroscience Services GmbH, NiefernÖschel., Germany, S11- 02876 GLP, not published Syngenta File No A12115I_10035 Y SYN",
"sectionNumber": 94
},
{
"text": "KIIIA1 10.5.2 / 01 Fussell S. 2004 MK936 (abamectin): a rateresponse extended laboratory test to determine the effects of an 18 g/L EC formulation (A8612AB) on the parasitic wasp Aphidius rhopalosiphi Syngenta Crop Protection AG, Basel, Switzerland Mambo-Tox. Ltd., Southampton, United Kingdom, SYN-04-1 2032631 GLP, not published Syngenta File No MK936/1105 Y SYN",
"sectionNumber": 95
},
{
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
"sectionNumber": 97
},
{
"text": "KIIIA1 10.5.2 / 02 Waterman L. 2004 MK936 (abamectin): A rateresponse extended laboratory test to determine the effects of an 18 g/L EC formulation (A8612AB) on the predatory mite Typhlodromus pyri Syngenta Crop Protection AG, Basel, Switzerland Mambo-Tox. Ltd., Southampton, United Kingdom, SYN-04-2 GLP, not published Syngenta File No MK936/1106 Y SYN",
"sectionNumber": 98
},
{
"text": "KIIIA1 10.5.2 / 03 Reber B. 1999 Acute toxicity of MK 936 EC 018 (A-8612 A) to the predatory ground beetle Poecilus cupreus L. (Coleoptera: carabidae) Novartis Crop Protection AG, Basel, Switzerland Novartis Crop Protection AG, Basel, Switzerland, 982611 GLP, not published Syngenta File No MK936/0626 Y SYN",
"sectionNumber": 99
},
{
"text": "KIIIA1 10.5.2 / 04 Kuhner C. 1998 Vertimec EC 0.18 (A-8612 A): combination of a semifield and an extented laboratory study (field aged residue) to evaluate the effects on the ground beetle, poecilus cupreus L. (Coleoptera, Carabidae) Novartis Crop Protection AG, Basel, Switzerland GAB Biotechnologie GmbH, Niefern, Germany, 98247/01-NEPc GLP, not published Syngenta File No MK936/0540 Y SYN",
"sectionNumber": 100
},
{
"text": "Annex point Author Year Title Source (where different from company) Company, Report No. GLP or GEP status (where relevant) Published or Unpublished Data protection claimed Y/N Owner (SYN = Syngenta)",
"sectionNumber": 102
},
{
"text": "KIIIA1 10.6.2 / 01 Friedrich S. 2011 Abamectin SC (A12115I) - Acute toxicity to the earthworm Eisenia fetida Syngenta BioChem Agrar, Gerichshain, Germany, 11 10 48 099 S GLP, not published Syngenta File No A12115I_10037 Y SYN",
"sectionNumber": 103
},
{
"text": "10.6.6 McCormac, A 2014 Abamectin SC (A13796I) A laboratory test to determine the effects of fresh residues on the springtail Folsomia candida (Collembola, Isotomidae), Syngenta Ltd., Report Number SYN-14-3, GLP, not published Y SYN",
"sectionNumber": 104
},
{
"text": "10.6.6 Vinall, S 2014 Abamectin SC (A13796I) A laboratory test to determine the effects of fresh residues on the predatory mite Hypoaspis aculeifer (Acari, Laelapidae), Syngenta Ltd. Report Number SYN-14-2, GLP, Not published Y SYN",
"sectionNumber": 105
},
{
"text": "A.9 Ecotoxicology data Abamectin Volume 2 17 Abamectin Volume 2 18 Abamectin Volume 2 19 Abamectin Volume 2 20",
"sectionNumber": 107
},
{
"text": "KIIIA1 Rafael 2009b Abamectin - Fe chelate - Y SYN",
"sectionNumber": 108
},
{
"text": "KIIIA1 Pedro Janer 2011 Tervigo - registration trials on Y SYN",
"sectionNumber": 109
}
],
"numberOfPages": 20
}
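
The fixture above matches the shape the integration tests read into the Text model; a hypothetical mirror of that shape for orientation (field names from the JSON, class layout assumed):

public class Text {
    public List<SectionText> sectionTexts; // one entry per extracted section
    public int numberOfPages;

    public static class SectionText {
        public String text;
        public int sectionNumber;
    }
}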

File diff suppressed because it is too large

View File

@ -2,204 +2,204 @@
"numberOfPages": 9,
"sectionTexts": [
{
"sectionNumber": "1",
"sectionNumber": 1,
"text": "Rule 0: Expand CBI Authors with firstname initials F. Lastname, J. Doe, M. Mustermann Lastname M., Doe J. Mustermann M."
},
{
"sectionNumber": "2",
"sectionNumber": 2,
"text": "Rule 1/2: Redact CBI Authors based on Dict Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No David Ksenia Max Mustermann Ranya Eikenboom Charalampos Schenk Tanja Schmitt ← should not be annotated, not in Dictionary"
},
{
"sectionNumber": "3",
"sectionNumber": 3,
"text": "Rule 3/4: Redact (not) CBI Add/ress based on Dict Dont Redact (mark as skipped) when Vertebrate Study is No Redact when Vertebrate Study is Yes Warnsveld, 7232 CX Warnsveld, Netherlands, NL Institut Industries, 33 Rue Jean Baffier, 18000 Bourges, France, FR 4-6 Chem. des Varennes, 18300 Saint-Satur, France, FR Lesdo Industries, Chäppelisträssli, 6078 Lungern, Switzerland Shlissel'burgskaya Ulitsa, Nizhny Novgorod Oblast, Russia, 603034, RU Karl Johans Gate 11, 0154 Oslo, Norway, NOR ← should not be annotated, not in Dictionary"
},
{
"sectionNumber": "4",
"sectionNumber": 4,
"text": "Rule 5: Do not redact genitive CBI_authors (Entries based on Dict) Expand to Hint Clarissas Donut ← not added to Dict, should be not annotated Simpson's Tower ← added to Authors-Dict, should be annotated"
},
{
"sectionNumber": "5",
"sectionNumber": 5,
"text": "Reference No Author(s) Year Title Laboratory"
},
{
"sectionNumber": "6",
"sectionNumber": 6,
"text": "BR2 /2 Michael N. 1998 The role of physical education in the school system. Weyland Industries"
},
{
"sectionNumber": "7",
"sectionNumber": 7,
"text": "BR3 /5 Funnarie B. 2001 It should be illegal to produce and sell tobacco Authentic Diagnostics"
},
{
"sectionNumber": "8",
"sectionNumber": 8,
"text": "ZZ/12 Feuer A. 1989 Social media is the real cause of teenage depression. Tyrell Corporation"
},
{
"sectionNumber": "10",
"sectionNumber": 10,
"text": "Rule 6-11 (Authors Table) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No"
},
{
"sectionNumber": "11",
"sectionNumber": 11,
"text": "Rule 12/13: Redact/Hint if CTL or BL was found Redact when Vertebrate Study is Yes Hint when Vertebrate Study is No CTL/without dictionary entry CTL without Slash BL/without dictionary entry BL without Slash CTL/with dictionary entry 1234 with Slash CTL with dictionary entry 5678 without Slash BL/with dictionary entry 1234 with Slash BL with dictionary entry 5678 without Slash"
},
{
"sectionNumber": "12",
"sectionNumber": 12,
"text": "Rule 14/15: Redact and add recommendation for et al. Redact Term “Desiree”, “Melanie” and add to Recommendation CBI Authors if Vertebrate Study is Yes & No Lorem ipsum dolor sit amet, consectetur adipiscing elit Desiree et al sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Melanie et al. Reference No 12345 Lorem ipsum."
},
{
"sectionNumber": "13",
"sectionNumber": 13,
"text": "Rule 16/17: Add recommendation for Addresses in Test Organism/Animals sections Recommend only if Vertebrate Study is Yes, else do nothing Lorem ipsum dolor sit Species: Mouse; Source: Stark Industries"
},
{
"sectionNumber": "14",
"sectionNumber": 14,
"text": "Rule 16/17 (additional) negative Test; missing first Key Nothing should happen because of missing first/second keyword according to the rules Dont redact here because of missing first key; Source: Omni Consumer Products Dont redact here because missing first keyword; Source Resources Development Administration"
},
{
"sectionNumber": "15",
"sectionNumber": 15,
"text": "Rule 16/17 (additional) negative Test; missing second Key Dont redact here because of missing second key; Species: Mouse; Omni Consumer Products Dont redact here because missing second keyword; Species: Mouse, Resources Development Administration"
},
{
"sectionNumber": "16",
"sectionNumber": 16,
"text": "Rule 18: Do not redact Names and Addresses if Published Information found Do not redact Names and Addresses if Published Information found Lorem ipsum dolor sit amet Oxford University Press in voluptate velit esse cillum. Iakovos Geiger, Julian Ritter, Asya Lyon, Carina Madsen, Alexandra Häusler, Hanke Mendel, Ranya Eikenboom. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Min Kwok, Jun K., Tu Wong, Qiang Suen, Zhou Mah, Ning Liu, Lei W. Huang, Ru X. Wu"
},
{
"sectionNumber": "17",
"sectionNumber": 17,
"text": "Rule 19/20: Redacted PII Personal Identification Information based on Dict Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Naka-27 Aomachi, Nomi, Ishikawa 923-1101, Japan, JP Sude Halide Nurullah Özgür U. Reyhan B. Rahim C. J. Alfred Xinyi Y. Tao Clara Siegfried ← not added to Dict, should be not annotated"
},
{
"sectionNumber": "18",
"sectionNumber": 18,
"text": "Rule 21/22: Redact Emails by RegEx Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Duis aute irure dolor in library@outlook.com reprehenderit in voluptate gordonjcp@msn.com velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint dinther@comcast.net occaecat cupidatat non proident, sunt in kawasaki@me.com culpa qui officia deserunt mollit anim id est laborum."
},
{
"sectionNumber": "19",
"sectionNumber": 19,
"text": "Description Text Contact Point"
},
{
"sectionNumber": "20",
"sectionNumber": 20,
"text": "Duis aute irure dolor in reprehenderit in voluptate velit esse cillum Contact Point dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Contact point: Central Research Industry Phone: +49 2113 2311 563 Fax: +49 2113 2311 560 Tel.: +81 764770164 Tel: +81 6653 44563 E-mail: Seriknowmobil@co.uk Email: maximiliamschmitt@arcor.de e-mail: maximiliamschmitt@t-online.de E-mail address: example@mail.com Contact: Maximiliam Schmitt Telephone number: +27414328992 Telephone No: +274 1432 8991 Fax number: +274 1432 8990 Telephone: +274 34223331 Phone No. +274 1432 8933 Contact: 493 1223 4592 European contact: European Central Institute Alternative contact: Emilia Lockhart Alternative contact: Cyberdyne Systems Tower Defense 121a Hong Kong, BT District"
},
{
"sectionNumber": "22",
"sectionNumber": 22,
"text": "Rule 23/24: Redact contact information (contains \"Contact point:\") Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Contact-Information was found should be appears”"
},
{
"sectionNumber": "23",
"sectionNumber": 23,
"text": "Description Text Applicant"
},
{
"sectionNumber": "24",
"sectionNumber": 24,
"text": "Duis aute irure dolor in reprehenderit in voluptate velit esse cillum Contact Point dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Contact point: Central Research Industry Phone: +49 2113 2311 563 Fax: +49 2113 2311 560 Tel.: +81 764770164 Tel: +81 6653 44563 E-mail: Seriknowmobil@co.uk Email: maximiliamschmitt@arcor.de e-mail: maximiliamschmitt@t-online.de E-mail address: example@mail.com Contact: Maximiliam Schmitt Telephone number: +27414328992 Telephone No: +274 1432 8991 Fax number: +274 1432 8990 Telephone: +274 34223331 Phone No. +274 1432 8933 Contact: 493 1223 4592 European contact: European Central Institute Alternative contact: Emilia Lockhart Alternative contact: Cyberdyne Systems Tower Defense 121a Hong Kong, BT District"
},
{
"sectionNumber": "26",
"sectionNumber": 26,
"text": "Rule 25/26: Redact contact information (contains \"Applicant\" as Headline or Text) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Applicant Information was found should be appears” Applicant Name: Soylent Corporation Contact point: Riddley Scott Address: 359-21 Huam-dong Yongsan-gu Seoul, South Korea Phone: +82 122 34188 Fax: +82 122 34180 E-mail: food-industry@korea.com Contact: This is a special case, everything between this and the next keyword should be redacted Tel.: +275 5678 1234 132 fsdfdfre frefref"
},
{
"sectionNumber": "27",
"sectionNumber": 27,
"text": "Rule 27/28: Redact contact Information (contains Producer) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Producer was found” should be appears Producer of the plant production Name: Umbrella Corporation Contact: Jill Valentine Address: 359-21 Huam-dong Yongsan-gu Seoul, South Korea Phone: +82 122 34188 Fax: +82 122 34180 E-mail: pharma-industry@korea.com"
},
{
"sectionNumber": "28",
"sectionNumber": 28,
"text": "Rule 29/30/31/32: If Text contains \"AUTHORS:\" and \"COMPLETION DATES\" but not \"STUDY COMPLETION DATES\", then Redact between both Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Study Report___ AUTHOR(S): Dr. Alan Grant COMPLETION DATE: 02 December 1997"
},
{
"sectionNumber": "29",
"sectionNumber": 29,
"text": "Rule 29/30/31/32: (additional) negative Test for Study completion dates No Redaction should be appears here Study Report___ AUTHOR(S): Dr. Alan Grant STUDY COMPLETION DATE: 02 December 1997"
},
{
"sectionNumber": "30",
"sectionNumber": 30,
"text": "Rule 33/34: If Text contains \"Performing Lab\" and \"Lab Project ID\", then Redact everything between Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Study Report___ PERFORMING LABORATORY: Umbrella Corporation LABORATORY PROJECT ID: Number 20201/33991/ERZAT/21"
},
{
"sectionNumber": "31",
"sectionNumber": 31,
"text": "Rule 35/36/37/38: ?? Tba"
},
{
"sectionNumber": "32",
"sectionNumber": 32,
"text": "Rule 39: Purity Hint Add Purity as Hint when Percent-Numbers is there Test Item: Soda Purity: 45% ← should be Hint Purity: <45% ← should be Hint Purity: >45% ← should be Hint Purity: 101% ← should ne be Hint because >100 % is not possible Purity: =>45% ← should be not Hint because additional symbols Purity: =<45% ← should be not Hint because additional symbols Purity: aa 45% ← should be not Hint because additional symbols Purity: 45% aa ← should be not Hint because additional symbols Purity: aa45% ← should be not Hint because additional symbols Purity: 45%aa ← should be not Hint because additional symbols Product-Code: EAK-L443 purity: 99% ← not Hint because case sensitive purity: >99% ← not Hint because case sensitive purity: <99% ← not Hint because case sensitive Supplier: GreenForce"
},
{
"sectionNumber": "33",
"sectionNumber": 33,
"text": "Rule 40: Ignore Dossier-Redaction if Confidentiality is not set Dont redact Dossier-Redaction if Confidentiality is not set in file attributes Excepteur sint occaecat cupidatat non proident, myDossierRedaction sunt in culpa qui officia deserunt mollit anim id est laborum."
},
{
"sectionNumber": "34",
"sectionNumber": 34,
"text": "Rule 41/42: Redact Signatures Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No __________________________ __________________________ Signed by: Dilara Sonnenschein Signed by: Tobias Müller"
},
{
"sectionNumber": "35.1.1.3",
"sectionNumber": 35,
"text": "Rule 43: Redact Logo Redact Logo only if Vertebrate Study is Yes, else do nothing (skipped)"
},
{
"sectionNumber": "36",
"sectionNumber": 36,
"text": "This is a Page-Header"
},
{
"sectionNumber": "37",
"sectionNumber": 37,
"text": "This is a Page-Header"
},
{
"sectionNumber": "38",
"sectionNumber": 38,
"text": "This is a Page-Header"
},
{
"sectionNumber": "39",
"sectionNumber": 39,
"text": "This is a Page-Header"
},
{
"sectionNumber": "40",
"sectionNumber": 40,
"text": "This is a Page-Header"
},
{
"sectionNumber": "41",
"sectionNumber": 41,
"text": "This is a Page-Header"
},
{
"sectionNumber": "42",
"sectionNumber": 42,
"text": "This is a Page-Header"
},
{
"sectionNumber": "43",
"sectionNumber": 43,
"text": "This is a Page-Header"
},
{
"sectionNumber": "44",
"sectionNumber": 44,
"text": "This is a Page-Header"
},
{
"sectionNumber": "45",
"sectionNumber": 45,
"text": "This is a Page-Footer"
},
{
"sectionNumber": "46",
"sectionNumber": 46,
"text": "This is a Page-Footer"
},
{
"sectionNumber": "47",
"sectionNumber": 47,
"text": "This is a Page-Footer"
},
{
"sectionNumber": "48",
"sectionNumber": 48,
"text": "This is a Page-Footer"
},
{
"sectionNumber": "49",
"sectionNumber": 49,
"text": "This is a Page-Footer"
},
{
"sectionNumber": "50",
"sectionNumber": 50,
"text": "This is a Page-Footer"
},
{
"sectionNumber": "51",
"sectionNumber": 51,
"text": "This is a Page-Footer"
},
{
"sectionNumber": "52",
"sectionNumber": 52,
"text": "This is a Page-Footer"
},
{
"sectionNumber": "53",
"sectionNumber": 53,
"text": "This is a Page-Footer"
}
]
}
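
This hunk normalizes sectionNumber from JSON strings to numbers (note "35.1.1.3" collapsing to 35), which is what a model declaring the field as an int expects; a sketch of Jackson's default coercion, assuming the SectionText shape sketched earlier:

ObjectMapper mapper = new ObjectMapper();
// Jackson coerces a purely numeric string into an int field by default...
mapper.readValue("{\"sectionNumber\": \"1\", \"text\": \"x\"}", Text.SectionText.class); // ok -> 1
// ...but a dotted outline label is not a valid int and fails fast:
mapper.readValue("{\"sectionNumber\": \"35.1.1.3\", \"text\": \"x\"}", Text.SectionText.class); // InvalidFormatException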

View File

@ -1,5 +0,0 @@
rootProject.name = "search-service"
include(":search-service-api-v1")
include(":search-service-server-v1")
project(":search-service-api-v1").projectDir = file("search-service-v1/search-service-api-v1")
project(":search-service-server-v1").projectDir = file("search-service-v1/search-service-server-v1")