Compare commits
73 Commits
| SHA1 |
|---|
| f2e8020c9b |
| e7f84c28d6 |
| 779acf6202 |
| ff626afe78 |
| ebf13d3be1 |
| 0693ddf197 |
| c6d0361678 |
| 380b62333c |
| f7a549c1a3 |
| 55e0386e31 |
| 599ed66773 |
| 8e717a5067 |
| 0a7b6cddb2 |
| 87b585c354 |
| 43bb08e301 |
| d116d99db7 |
| e0fb825cf7 |
| 18abdedaf8 |
| fe28c1463a |
| 2157751b3e |
| 74f971757c |
| 79be8e272b |
| d10b5b26e8 |
| fd1cb84ebb |
| 7abf08e13b |
| 1cef042c1e |
| 4296abab94 |
| 3f7373f55d |
| 496c6a1da8 |
| 2234a5dbd8 |
| 03c6248b84 |
| e59012ba4b |
| e931522014 |
| c877eea3cc |
| dd53eeff93 |
| a41a584499 |
| 9d6da4e7a3 |
| 25c90d1312 |
| 36e2ce5196 |
| bb92e423f1 |
| 2a96ba5477 |
| d4a77d3f70 |
| 982846c0c3 |
| 731aa80b69 |
| 9ee4c8cd91 |
| e24dfe7fbe |
| bf9540a986 |
| 0c66b4e505 |
| 64cbe4aaf9 |
| 01ab04ece7 |
| 1c21f72a08 |
| bd690d6cc9 |
| c44bd014d3 |
| 60842f58d0 |
| ec56f833ff |
| ee5cf2ff5a |
| 665d27085a |
| ee698a9647 |
| 28ab0dc8cc |
| da080f9671 |
| 4ae80fdffc |
| c144f26099 |
| eba029928e |
| 6b58e8b894 |
| c72c881e6e |
| f518d4ff9c |
| 65ccdb8257 |
| 4fcd310a66 |
| be1b0eb27d |
| b09132d5bf |
| 173428771b |
| b1b42ce0d7 |
| afd40a7cd7 |
.gitignore (vendored): 9 changed lines

@@ -26,3 +26,12 @@
 **/.DS_Store
 **/classpath-data.json
 **/dependencies-and-licenses-overview.txt
+
+gradle.properties
+gradlew
+gradlew.bat
+gradle/
+
+**/.gradle
+**/build
+
.gitlab-ci.yml (new file): 23 lines

@@ -0,0 +1,23 @@
variables:
  SONAR_PROJECT_KEY: 'RED_search-service'
include:
  - project: 'gitlab/gitlab'
    ref: 'main'
    file: 'ci-templates/gradle_java.yml'

deploy:
  stage: deploy
  tags:
    - dind
  script:
    - echo "Building with gradle version ${BUILDVERSION}"
    - gradle -Pversion=${BUILDVERSION} publish
    - gradle bootBuildImage --cleanCache --publishImage -PbuildbootDockerHostNetwork=true -Pversion=${BUILDVERSION}
    - echo "BUILDVERSION=$BUILDVERSION" >> version.env
  artifacts:
    reports:
      dotenv: version.env
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
    - if: $CI_COMMIT_BRANCH =~ /^release/
    - if: $CI_COMMIT_TAG
(deleted file)

@@ -1,37 +0,0 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>com.atlassian.bamboo</groupId>
        <artifactId>bamboo-specs-parent</artifactId>
        <version>8.1.3</version>
        <relativePath/>
    </parent>

    <artifactId>bamboo-specs</artifactId>
    <version>1.0.0-SNAPSHOT</version>
    <packaging>jar</packaging>

    <dependencies>
        <dependency>
            <groupId>com.atlassian.bamboo</groupId>
            <artifactId>bamboo-specs-api</artifactId>
        </dependency>
        <dependency>
            <groupId>com.atlassian.bamboo</groupId>
            <artifactId>bamboo-specs</artifactId>
        </dependency>

        <!-- Test dependencies -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>


    <!-- run 'mvn test' to perform offline validation of the plan -->
    <!-- run 'mvn -Ppublish-specs' to upload the plan to your Bamboo server -->
</project>
(deleted file)

@@ -1,129 +0,0 @@
package buildjob;

import static com.atlassian.bamboo.specs.builders.task.TestParserTask.createJUnitParserTask;

import java.time.LocalTime;

import com.atlassian.bamboo.specs.api.BambooSpec;
import com.atlassian.bamboo.specs.api.builders.BambooKey;
import com.atlassian.bamboo.specs.api.builders.Variable;
import com.atlassian.bamboo.specs.api.builders.docker.DockerConfiguration;
import com.atlassian.bamboo.specs.api.builders.permission.PermissionType;
import com.atlassian.bamboo.specs.api.builders.permission.Permissions;
import com.atlassian.bamboo.specs.api.builders.permission.PlanPermissions;
import com.atlassian.bamboo.specs.api.builders.plan.Job;
import com.atlassian.bamboo.specs.api.builders.plan.Plan;
import com.atlassian.bamboo.specs.api.builders.plan.PlanIdentifier;
import com.atlassian.bamboo.specs.api.builders.plan.Stage;
import com.atlassian.bamboo.specs.api.builders.plan.branches.BranchCleanup;
import com.atlassian.bamboo.specs.api.builders.plan.branches.PlanBranchManagement;
import com.atlassian.bamboo.specs.api.builders.project.Project;
import com.atlassian.bamboo.specs.builders.task.CheckoutItem;
import com.atlassian.bamboo.specs.builders.task.InjectVariablesTask;
import com.atlassian.bamboo.specs.builders.task.ScriptTask;
import com.atlassian.bamboo.specs.builders.task.VcsCheckoutTask;
import com.atlassian.bamboo.specs.builders.task.VcsTagTask;
import com.atlassian.bamboo.specs.builders.trigger.BitbucketServerTrigger;
import com.atlassian.bamboo.specs.builders.trigger.ScheduledTrigger;
import com.atlassian.bamboo.specs.model.task.InjectVariablesScope;
import com.atlassian.bamboo.specs.model.task.ScriptTaskProperties.Location;
import com.atlassian.bamboo.specs.util.BambooServer;

/**
 * Plan configuration for Bamboo.
 * Learn more on: <a href="https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs">https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs</a>
 */
@BambooSpec
public class PlanSpec {

    private static final String SERVICE_NAME = "search-service";

    private static final String JVM_ARGS = " -Xmx4g -XX:+ExitOnOutOfMemoryError -XX:SurvivorRatio=2 -XX:NewRatio=1 -XX:InitialTenuringThreshold=16 -XX:MaxTenuringThreshold=16 -XX:InitiatingHeapOccupancyPercent=35 ";

    private static final String SERVICE_KEY = SERVICE_NAME.toUpperCase().replaceAll("-", "");


    /**
     * Run main to publish plan on Bamboo
     */
    public static void main(final String[] args) throws Exception {
        // By default credentials are read from the '.credentials' file.
        BambooServer bambooServer = new BambooServer("http://localhost:8085");

        Plan plan = new PlanSpec().createPlan();
        bambooServer.publish(plan);
        PlanPermissions planPermission = new PlanSpec().createPlanPermission(plan.getIdentifier());
        bambooServer.publish(planPermission);

        Plan secPlan = new PlanSpec().createSecBuild();
        bambooServer.publish(secPlan);
        PlanPermissions secPlanPermission = new PlanSpec().createPlanPermission(secPlan.getIdentifier());
        bambooServer.publish(secPlanPermission);
    }


    private PlanPermissions createPlanPermission(PlanIdentifier planIdentifier) {

        Permissions permission = new Permissions().userPermissions("atlbamboo",
                PermissionType.EDIT,
                PermissionType.VIEW,
                PermissionType.ADMIN,
                PermissionType.CLONE,
                PermissionType.BUILD)
            .groupPermissions("development", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE, PermissionType.BUILD)
            .groupPermissions("devplant", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE, PermissionType.BUILD)
            .loggedInUserPermissions(PermissionType.VIEW)
            .anonymousUserPermissionView();
        return new PlanPermissions(planIdentifier.getProjectKey(), planIdentifier.getPlanKey()).permissions(permission);
    }


    private Project project() {

        return new Project().name("RED").key(new BambooKey("RED"));
    }


    public Plan createPlan() {

        return new Plan(project(), SERVICE_NAME, new BambooKey(SERVICE_KEY)).description("Plan created from (enter repository url of your plan)")
            .variables(new Variable("maven_add_param", ""))
            .stages(new Stage("Default Stage").jobs(new Job("Default Job", new BambooKey("JOB1")).tasks(new ScriptTask().description("Clean")
                    .inlineBody("#!/bin/bash\n" + "set -e\n" + "rm -rf ./*"),
                new VcsCheckoutTask().description("Checkout Default Repository").cleanCheckout(true).checkoutItems(new CheckoutItem().defaultRepository()),
                new ScriptTask().description("Build").location(Location.FILE).fileFromPath("bamboo-specs/src/main/resources/scripts/build-java.sh").argument(SERVICE_NAME),
                createJUnitParserTask().description("Resultparser")
                    .resultDirectories("**/test-reports/*.xml, **/target/surefire-reports/*.xml, **/target/failsafe-reports/*.xml")
                    .enabled(true),
                new InjectVariablesTask().description("Inject git Tag").path("git.tag").namespace("g").scope(InjectVariablesScope.LOCAL),
                new VcsTagTask().description("${bamboo.g.gitTag}").tagName("${bamboo.g.gitTag}").defaultRepository())
                .dockerConfiguration(new DockerConfiguration().image("nexus.iqser.com:5001/infra/maven:3.8.4-openjdk-17-slim")
                    .dockerRunArguments("--net=host")
                    .volume("/etc/maven/settings.xml", "/usr/share/maven/conf/settings.xml")
                    .volume("/var/run/docker.sock", "/var/run/docker.sock"))))
            .linkedRepositories("RED / " + SERVICE_NAME)

            .triggers(new BitbucketServerTrigger())
            .planBranchManagement(new PlanBranchManagement().createForVcsBranch()
                .delete(new BranchCleanup().whenInactiveInRepositoryAfterDays(14))
                .notificationForCommitters());
    }


    public Plan createSecBuild() {

        return new Plan(project(), SERVICE_NAME + "-Sec", new BambooKey(SERVICE_KEY + "SEC")).description("Security Analysis Plan")
            .stages(new Stage("Default Stage").jobs(new Job("Default Job", new BambooKey("JOB1")).tasks(new ScriptTask().description("Clean")
                    .inlineBody("#!/bin/bash\n" + "set -e\n" + "rm -rf ./*"),
                new VcsCheckoutTask().description("Checkout Default Repository").cleanCheckout(true).checkoutItems(new CheckoutItem().defaultRepository()),
                new ScriptTask().description("Sonar").location(Location.FILE).fileFromPath("bamboo-specs/src/main/resources/scripts/sonar-java.sh").argument(SERVICE_NAME))
                .dockerConfiguration(new DockerConfiguration().image("nexus.iqser.com:5001/infra/maven:3.8.4-openjdk-17-slim")
                    .dockerRunArguments("--net=host")
                    .volume("/etc/maven/settings.xml", "/usr/share/maven/conf/settings.xml")
                    .volume("/var/run/docker.sock", "/var/run/docker.sock"))))
            .linkedRepositories("RED / " + SERVICE_NAME)
            .triggers(new ScheduledTrigger().scheduleOnceDaily(LocalTime.of(23, 00)))
            .planBranchManagement(new PlanBranchManagement().createForVcsBranchMatching("release.*").notificationForCommitters());
    }

}
(deleted file)

@@ -1,60 +0,0 @@
#!/bin/bash
set -e

SERVICE_NAME=$1

if [[ "$bamboo_planRepository_branchName" == "master" ]]
then
  branchVersion=$(cat pom.xml | grep -Eo " <version>.*-SNAPSHOT</version>" | sed -s 's|<version>\(.*\)\..*\(-*.*\)</version>|\1|' | tr -d ' ')
  latestVersion=$( semver $(git tag -l "${branchVersion}.*" ) | tail -n1 )
  newVersion="$(semver $latestVersion -p -i minor)"
elif [[ "$bamboo_planRepository_branchName" == release* ]]
then
  branchVersion=$(echo $bamboo_planRepository_branchName | sed -s 's|release\/\([0-9]\+\.[0-9]\+\)\.x|\1|')
  latestVersion=$( semver $(git tag -l "${branchVersion}.*" ) | tail -n1 )
  newVersion="$(semver $latestVersion -p -i patch)"
elif [[ "${bamboo_version_tag}" != "dev" ]]
then
  newVersion="${bamboo_version_tag}"
else
  mvn -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
    --no-transfer-progress \
    ${bamboo_maven_add_param} \
    clean install \
    -Djava.security.egd=file:/dev/./urandomelse
  echo "gitTag=${bamboo_planRepository_1_branch}_${bamboo_buildNumber}" > git.tag
  exit 0
fi

echo "gitTag=${newVersion}" > git.tag

mvn --no-transfer-progress \
  -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
  ${bamboo_maven_add_param} \
  versions:set \
  -DnewVersion=${newVersion}

mvn --no-transfer-progress \
  -f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
  ${bamboo_maven_add_param} \
  versions:set \
  -DnewVersion=${newVersion}

mvn -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
  --no-transfer-progress \
  clean deploy \
  ${bamboo_maven_add_param} \
  -e \
  -DdeployAtEnd=true \
  -Dmaven.wagon.http.ssl.insecure=true \
  -Dmaven.wagon.http.ssl.allowall=true \
  -Dmaven.wagon.http.ssl.ignore.validity.dates=true \
  -DaltDeploymentRepository=iqser_release::default::https://nexus.iqser.com/repository/red-platform-releases

mvn --no-transfer-progress \
  -f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
  package

mvn --no-transfer-progress \
  -f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
  docker:push
(deleted file)

@@ -1,44 +0,0 @@
#!/bin/bash
set -e

SERVICE_NAME=$1

echo "build jar binaries"
mvn -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
  --no-transfer-progress \
  clean install \
  -Djava.security.egd=file:/dev/./urandomelse

echo "dependency-check:aggregate"
mvn --no-transfer-progress \
  -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
  org.owasp:dependency-check-maven:aggregate

if [[ -z "${bamboo_repository_pr_key}" ]]
then
  echo "Sonar Scan for branch: ${bamboo_planRepository_1_branch}"
  mvn --no-transfer-progress \
    -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
    sonar:sonar \
    -Dsonar.projectKey=RED_$SERVICE_NAME \
    -Dsonar.host.url=https://sonarqube.iqser.com \
    -Dsonar.login=${bamboo_sonarqube_api_token_secret} \
    -Dsonar.branch.name=${bamboo_planRepository_1_branch} \
    -Dsonar.dependencyCheck.jsonReportPath=target/dependency-check-report.json \
    -Dsonar.dependencyCheck.xmlReportPath=target/dependency-check-report.xml \
    -Dsonar.dependencyCheck.htmlReportPath=target/dependency-check-report.html
else
  echo "Sonar Scan for PR with key1: ${bamboo_repository_pr_key}"
  mvn --no-transfer-progress \
    -f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
    sonar:sonar \
    -Dsonar.projectKey=RED_$SERVICE_NAME \
    -Dsonar.host.url=https://sonarqube.iqser.com \
    -Dsonar.login=${bamboo_sonarqube_api_token_secret} \
    -Dsonar.pullrequest.key=${bamboo_repository_pr_key} \
    -Dsonar.pullrequest.branch=${bamboo_repository_pr_sourceBranch} \
    -Dsonar.pullrequest.base=${bamboo_repository_pr_targetBranch} \
    -Dsonar.dependencyCheck.jsonReportPath=target/dependency-check-report.json \
    -Dsonar.dependencyCheck.xmlReportPath=target/dependency-check-report.xml \
    -Dsonar.dependencyCheck.htmlReportPath=target/dependency-check-report.html
fi
(deleted file)

@@ -1,21 +0,0 @@
package buildjob;

import org.junit.Test;

import com.atlassian.bamboo.specs.api.builders.plan.Plan;
import com.atlassian.bamboo.specs.api.exceptions.PropertiesValidationException;
import com.atlassian.bamboo.specs.api.util.EntityPropertiesBuilders;

public class PlanSpecTest {

    @Test
    public void checkYourPlanOffline() throws PropertiesValidationException {

        Plan plan = new PlanSpec().createPlan();
        EntityPropertiesBuilders.build(plan);

        Plan secPlan = new PlanSpec().createSecBuild();
        EntityPropertiesBuilders.build(secPlan);
    }

}
buildSrc/build.gradle.kts (new file): 7 lines

@@ -0,0 +1,7 @@
plugins {
    `kotlin-dsl`
}

repositories {
    gradlePluginPortal()
}
(new file)

@@ -0,0 +1,60 @@
plugins {
    `java-library`
    `maven-publish`
    pmd
    checkstyle
    jacoco
}

repositories {
    mavenLocal()
    mavenCentral()
    maven {
        url = uri("https://nexus.knecon.com/repository/gindev/");
        credentials {
            username = providers.gradleProperty("mavenUser").getOrNull();
            password = providers.gradleProperty("mavenPassword").getOrNull();
        }
    }
}

group = "com.iqser.red.service"

java.sourceCompatibility = JavaVersion.VERSION_17
java.targetCompatibility = JavaVersion.VERSION_17

pmd {
    isConsoleOutput = true
}

tasks.pmdMain {
    pmd.ruleSetFiles = files("${rootDir}/config/pmd/pmd.xml")
}

tasks.pmdTest {
    pmd.ruleSetFiles = files("${rootDir}/config/pmd/test_pmd.xml")
}

tasks.named<Test>("test") {
    useJUnitPlatform()
    reports {
        junitXml.outputLocation.set(layout.buildDirectory.dir("reports/junit"))
    }
}

tasks.test {
    finalizedBy(tasks.jacocoTestReport) // report is always generated after tests run
}

tasks.jacocoTestReport {
    dependsOn(tasks.test) // tests are required to run before generating the report
    reports {
        xml.required.set(true)
        csv.required.set(false)
        html.outputLocation.set(layout.buildDirectory.dir("jacocoHtml"))
    }
}

java {
    withJavadocJar()
}
config/checkstyle/checkstyle.xml (new file): 39 lines

@@ -0,0 +1,39 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
        "http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
<module name="Checker">
    <property
            name="severity"
            value="error"/>
    <module name="TreeWalker">
        <module name="SuppressWarningsHolder"/>
        <module name="MissingDeprecated"/>
        <module name="MissingOverride"/>
        <module name="AnnotationLocation"/>
        <module name="JavadocStyle"/>
        <module name="NonEmptyAtclauseDescription"/>
        <module name="IllegalImport"/>
        <module name="RedundantImport"/>
        <module name="RedundantModifier"/>
        <module name="EmptyBlock"/>
        <module name="DefaultComesLast"/>
        <module name="EmptyStatement"/>
        <module name="EqualsHashCode"/>
        <module name="ExplicitInitialization"/>
        <module name="IllegalInstantiation"/>
        <module name="ModifiedControlVariable"/>
        <module name="MultipleVariableDeclarations"/>
        <module name="PackageDeclaration"/>
        <module name="ParameterAssignment"/>
        <module name="SimplifyBooleanExpression"/>
        <module name="SimplifyBooleanReturn"/>
        <module name="StringLiteralEquality"/>
        <module name="OneStatementPerLine"/>
        <module name="FinalClass"/>
        <module name="ArrayTypeStyle"/>
        <module name="UpperEll"/>
        <module name="OuterTypeFilename"/>
    </module>
    <module name="FileTabCharacter"/>
    <module name="SuppressWarningsFilter"/>
</module>
config/pmd/pmd.xml (new file): 20 lines

@@ -0,0 +1,20 @@
<?xml version="1.0"?>
<ruleset name="Custom ruleset"
         xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">

    <description>
        Knecon ruleset checks the code for bad stuff
    </description>

    <rule ref="category/java/errorprone.xml">
        <exclude name="MissingSerialVersionUID"/>
        <exclude name="AvoidLiteralsInIfCondition"/>
        <exclude name="AvoidDuplicateLiterals"/>
        <exclude name="NullAssignment"/>
        <exclude name="AssignmentInOperand"/>
        <exclude name="BeanMembersShouldSerialize"/>
    </rule>

</ruleset>
config/pmd/test_pmd.xml (new file): 22 lines

@@ -0,0 +1,22 @@
<?xml version="1.0"?>
<ruleset name="Custom ruleset"
         xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">

    <description>
        Knecon test ruleset checks the code for bad stuff
    </description>


    <rule ref="category/java/errorprone.xml">
        <exclude name="MissingSerialVersionUID"/>
        <exclude name="AvoidLiteralsInIfCondition"/>
        <exclude name="AvoidDuplicateLiterals"/>
        <exclude name="NullAssignment"/>
        <exclude name="AssignmentInOperand"/>
        <exclude name="TestClassWithoutTestCases"/>
        <exclude name="BeanMembersShouldSerialize"/>
    </rule>

</ruleset>
gradle.properties.kts (new file): 1 line

@@ -0,0 +1 @@
version = 2.0-SNAPSHOT
pom.xml (deleted): 21 lines

@@ -1,21 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>

    <artifactId>search-service</artifactId>
    <groupId>com.iqser.red.service</groupId>
    <version>2.0-SNAPSHOT</version>


    <packaging>pom</packaging>

    <modules>
        <module>bamboo-specs</module>
        <module>search-service-v1</module>
        <module>search-service-image-v1</module>
    </modules>

</project>
publish-custom-image.sh (new executable file): 15 lines

@@ -0,0 +1,15 @@
#!/bin/bash
dir=${PWD##*/}
gradle assemble

# Get the current Git branch
branch=$(git rev-parse --abbrev-ref HEAD)

# Get the short commit hash (first 5 characters)
commit_hash=$(git rev-parse --short=5 HEAD)

# Combine branch and commit hash
buildName="${USER}-${branch}-${commit_hash}"

gradle bootBuildImage --cleanCache --publishImage -PbuildbootDockerHostNetwork=true -Pversion=$buildName
echo "nexus.knecon.com:5001/red/${dir}-server-v1:$buildName"
renovate.json (new file): 6 lines

@@ -0,0 +1,6 @@
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": [
    "config:base"
  ]
}
(deleted file)

@@ -1,98 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>com.iqser.red</groupId>
        <artifactId>platform-docker-dependency</artifactId>
        <version>1.2.0</version>
        <relativePath/>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>search-service-image-v1</artifactId>
    <groupId>com.iqser.red.service</groupId>
    <version>1.0-SNAPSHOT</version>
    <packaging>pom</packaging>


    <properties>
        <service.server>search-service-server-v1</service.server>
        <platform.jar>${service.server}.jar</platform.jar>
        <docker.skip.push>false</docker.skip.push>
        <docker.image.name>${docker.image.prefix}/${service.server}</docker.image.name>
    </properties>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-dependency-plugin</artifactId>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-resources-plugin</artifactId>
            </plugin>
            <plugin>
                <groupId>org.codehaus.mojo</groupId>
                <artifactId>exec-maven-plugin</artifactId>
            </plugin>
            <plugin>
                <groupId>io.fabric8</groupId>
                <artifactId>docker-maven-plugin</artifactId>
            </plugin>
        </plugins>

        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-dependency-plugin</artifactId>
                    <executions>
                        <execution>
                            <id>download-platform-jar</id>
                            <phase>prepare-package</phase>
                            <goals>
                                <goal>copy</goal>
                            </goals>
                            <configuration>
                                <artifactItems>
                                    <dependency>
                                        <groupId>${project.groupId}</groupId>
                                        <artifactId>${service.server}</artifactId>
                                        <version>${project.version}</version>
                                        <type>jar</type>
                                        <overWrite>true</overWrite>
                                        <destFileName>${platform.jar}</destFileName>
                                    </dependency>
                                </artifactItems>
                                <outputDirectory>${docker.build.directory}</outputDirectory>
                            </configuration>
                        </execution>
                    </executions>
                </plugin>
                <plugin>
                    <groupId>io.fabric8</groupId>
                    <artifactId>docker-maven-plugin</artifactId>
                    <configuration>
                        <images>
                            <image>
                                <name>${docker.image.name}</name>
                                <build>
                                    <dockerFileDir>${docker.build.directory}</dockerFileDir>
                                    <args>
                                        <PLATFORM_JAR>${platform.jar}</PLATFORM_JAR>
                                    </args>
                                    <tags>
                                        <tag>${docker.image.version}</tag>
                                        <tag>latest</tag>
                                    </tags>
                                </build>
                            </image>
                        </images>
                    </configuration>
                </plugin>
            </plugins>
        </pluginManagement>
    </build>
</project>
(deleted file)

@@ -1,7 +0,0 @@
FROM red/base-image:2.0.2

ARG PLATFORM_JAR

ENV PLATFORM_JAR ${PLATFORM_JAR}

COPY ["${PLATFORM_JAR}", "/"]
(deleted file)

@@ -1,99 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>platform-dependency</artifactId>
        <groupId>com.iqser.red</groupId>
        <version>1.17.0</version>
        <relativePath/>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>search-service-v1</artifactId>
    <groupId>com.iqser.red.service</groupId>
    <version>2.0-SNAPSHOT</version>


    <packaging>pom</packaging>

    <modules>
        <module>search-service-api-v1</module>
        <module>search-service-server-v1</module>
    </modules>

    <dependencyManagement>

        <dependencies>
            <dependency>
                <groupId>com.iqser.red</groupId>
                <artifactId>platform-commons-dependency</artifactId>
                <version>1.22.0</version>
                <scope>import</scope>
                <type>pom</type>
            </dependency>

        </dependencies>

    </dependencyManagement>

    <build>
        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.sonarsource.scanner.maven</groupId>
                    <artifactId>sonar-maven-plugin</artifactId>
                    <version>3.9.0.2155</version>
                </plugin>
                <plugin>
                    <groupId>org.owasp</groupId>
                    <artifactId>dependency-check-maven</artifactId>
                    <version>6.3.1</version>
                    <configuration>
                        <format>ALL</format>
                    </configuration>
                </plugin>
                <plugin>
                    <groupId>org.jacoco</groupId>
                    <artifactId>jacoco-maven-plugin</artifactId>
                    <executions>
                        <execution>
                            <id>prepare-agent</id>
                            <goals>
                                <goal>prepare-agent</goal>
                            </goals>
                        </execution>
                        <execution>
                            <id>report</id>
                            <goals>
                                <goal>report</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
            </plugins>
        </pluginManagement>
        <plugins>
            <plugin>
                <groupId>org.jacoco</groupId>
                <artifactId>jacoco-maven-plugin</artifactId>
                <version>0.8.8</version>
                <executions>
                    <execution>
                        <id>prepare-agent</id>
                        <goals>
                            <goal>prepare-agent</goal>
                        </goals>
                    </execution>
                    <execution>
                        <id>report</id>
                        <goals>
                            <goal>report-aggregate</goal>
                        </goals>
                        <phase>verify</phase>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
search-service-v1/search-service-api-v1/build.gradle.kts (new file): 28 lines

@@ -0,0 +1,28 @@
plugins {
    id("com.iqser.red.service.java-conventions")
    id("io.freefair.lombok") version "8.4"
}

description = "search-service-api-v1"

dependencies {
    implementation("org.springframework:spring-web:6.0.6")
    testImplementation("org.springframework.boot:spring-boot-starter-test:3.1.5")
}

publishing {
    publications {
        create<MavenPublication>(name) {
            from(components["java"])
        }
    }
    repositories {
        maven {
            url = uri("https://nexus.knecon.com/repository/red-platform-releases/")
            credentials {
                username = providers.gradleProperty("mavenUser").getOrNull();
                password = providers.gradleProperty("mavenPassword").getOrNull();
            }
        }
    }
}
(deleted file)

@@ -1,21 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <artifactId>search-service-v1</artifactId>
        <groupId>com.iqser.red.service</groupId>
        <version>2.0-SNAPSHOT</version>
    </parent>

    <artifactId>search-service-api-v1</artifactId>

    <dependencies>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-web</artifactId>
            <optional>true</optional>
        </dependency>
    </dependencies>
</project>
@@ -14,7 +14,7 @@ import lombok.NoArgsConstructor;
 @AllArgsConstructor
 public class MatchedSection {

-    private int sectionNumber;
+    private String sectionNumber;
     private String headline;

     @Builder.Default
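The same int-to-String change of `sectionNumber` appears again below for IndexSection and SectionText. A hedged usage sketch of the new field type, assuming MatchedSection carries Lombok `@Builder` and `@Getter` (suggested, but not proven, by the `@Builder.Default` and `@AllArgsConstructor` annotations in the hunk); the dotted identifier "2.1.3" and the method names are purely hypothetical:

```java
// Hypothetical caller, not part of the diff: sectionNumber is now free-form text,
// so it is built from a String and compared with equals() rather than ==.
static MatchedSection exampleSection() {
    return MatchedSection.builder()
            .sectionNumber("2.1.3")          // an int literal no longer compiles here
            .headline("Example headline")
            .build();
}

static boolean isSection(MatchedSection section, String wanted) {
    return wanted.equals(section.getSectionNumber());
}
```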
@@ -3,6 +3,7 @@ package com.iqser.red.service.search.v1.resources;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
 import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PostMapping;
 import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.ResponseBody;
 import org.springframework.web.bind.annotation.ResponseStatus;
@@ -17,7 +18,7 @@ public interface SearchResource {

     @ResponseBody
     @ResponseStatus(value = HttpStatus.OK)
-    @GetMapping(value = SEARCH_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
+    @PostMapping(value = SEARCH_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
     SearchResult getDossierStatus(@RequestBody SearchRequest searchRequest);

 }
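With the mapping switched from GET to POST, callers now send the SearchRequest as a JSON request body. A hedged sketch of a client, following the FeignClient-extends-resource pattern used elsewhere in this compare (the removed TenantsClient); the package, the interface name, and the `search-service.url` property are assumptions, not taken from the diff:

```java
package com.iqser.red.service.search.v1.server.client;

import org.springframework.cloud.openfeign.FeignClient;

import com.iqser.red.service.search.v1.resources.SearchResource;

// Hypothetical client, not part of the diff. Because SearchResource now declares
// @PostMapping, Feign issues POST <search-service.url> + SEARCH_PATH with the
// SearchRequest serialized as JSON.
@FeignClient(name = "SearchResource", url = "${search-service.url}")
public interface SearchClient extends SearchResource {

}
```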
search-service-v1/search-service-server-v1/build.gradle.kts (new file): 73 lines

@@ -0,0 +1,73 @@
import org.springframework.boot.gradle.tasks.bundling.BootBuildImage

plugins {
    application
    id("com.iqser.red.service.java-conventions")
    id("org.springframework.boot") version "3.1.5"
    id("io.spring.dependency-management") version "1.1.3"
    id("org.sonarqube") version "4.4.1.3373"
    id("io.freefair.lombok") version "8.4"
}

description = "search-service-server-v1"

configurations {
    all {
        exclude(group = "commons-logging", module = "commons-logging")
        exclude(group = "org.springframework.boot", module = "spring-boot-starter-log4j2")
        exclude(group = "com.iqser.red.commons", module = "logging-commons")
    }
}

val springBootStarterVersion = "3.1.5"

dependencies {
    api("com.knecon.fforesight:tenant-commons:0.30.0")
    api("com.knecon.fforesight:tracing-commons:0.5.0")
    api("com.knecon.fforesight:lifecycle-commons:0.6.0")
    api("com.google.guava:guava:31.1-jre")
    api("com.iqser.red.commons:storage-commons:2.45.0")
    api(project(":search-service-api-v1"))
    api("com.iqser.red.service:persistence-service-internal-api-v1:2.576.0-RED10106.0")
    api("com.iqser.red.commons:spring-commons:2.1.0")
    api("com.iqser.red.commons:metric-commons:2.1.0")
    api("com.iqser.red.commons:jackson-commons:2.1.0")
    api("co.elastic.clients:elasticsearch-java:8.6.2")
    api("org.opensearch.client:opensearch-rest-client:2.6.0")
    api("org.opensearch.client:opensearch-java:2.3.0")
    api("jakarta.json:jakarta.json-api:2.1.1")
    api("org.springframework.cloud:spring-cloud-starter-openfeign:4.0.4")
    api("org.springframework.boot:spring-boot-starter-aop:${springBootStarterVersion}")
    api("org.springframework.boot:spring-boot-starter-amqp:${springBootStarterVersion}")
    api("net.logstash.logback:logstash-logback-encoder:7.4")
    api("ch.qos.logback:logback-classic")
    testImplementation("org.springframework.boot:spring-boot-starter-test:${springBootStarterVersion}")
    testImplementation("com.iqser.red.commons:test-commons:2.1.0")
    testImplementation("org.testcontainers:elasticsearch:1.17.6")
    testImplementation("org.opensearch:opensearch-testcontainers:2.0.0")
    testImplementation("org.springframework.amqp:spring-rabbit-test:3.0.2")
}

tasks.named<BootBuildImage>("bootBuildImage") {

    environment.put("BPE_DELIM_JAVA_TOOL_OPTIONS", " ")
    environment.put("BPE_APPEND_JAVA_TOOL_OPTIONS", "-Dfile.encoding=UTF-8")

    imageName.set("nexus.knecon.com:5001/red/${project.name}:${project.version}")
    if (project.hasProperty("buildbootDockerHostNetwork")) {
        network.set("host")
    }
    docker {
        if (project.hasProperty("buildbootDockerHostNetwork")) {
            bindHostToBuilder.set(true)
        }
        verboseLogging.set(true)

        publishRegistry {
            username.set(providers.gradleProperty("mavenUser").getOrNull())
            password.set(providers.gradleProperty("mavenPassword").getOrNull())
            email.set(providers.gradleProperty("mavenEmail").getOrNull())
            url.set("https://nexus.knecon.com:5001/")
        }
    }
}
(deleted file)

@@ -1,198 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xmlns="http://maven.apache.org/POM/4.0.0"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <artifactId>search-service-v1</artifactId>
        <groupId>com.iqser.red.service</groupId>
        <version>2.0-SNAPSHOT</version>
    </parent>

    <artifactId>search-service-server-v1</artifactId>

    <properties>
        <persistence-service.version>2.0.12</persistence-service.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>31.1-jre</version>
        </dependency>
        <dependency>
            <groupId>com.iqser.red.commons</groupId>
            <artifactId>storage-commons</artifactId>
        </dependency>
        <dependency>
            <groupId>com.iqser.red.service</groupId>
            <artifactId>search-service-api-v1</artifactId>
            <version>${project.version}</version>
        </dependency>
        <dependency>
            <groupId>com.iqser.red.service</groupId>
            <artifactId>persistence-service-internal-api-v1</artifactId>
            <version>${persistence-service.version}</version>
        </dependency>

        <!-- commons -->
        <dependency>
            <groupId>com.iqser.red.commons</groupId>
            <artifactId>spring-commons</artifactId>
        </dependency>
        <dependency>
            <groupId>com.iqser.red.commons</groupId>
            <artifactId>logging-commons</artifactId>
        </dependency>
        <dependency>
            <groupId>com.iqser.red.commons</groupId>
            <artifactId>metric-commons</artifactId>
        </dependency>
        <!-- other external -->
        <dependency>
            <!-- this dependency is necessary to work with java8 zoned timestamps -->
            <groupId>com.iqser.red.commons</groupId>
            <artifactId>jackson-commons</artifactId>
        </dependency>

        <dependency>
            <groupId>co.elastic.clients</groupId>
            <artifactId>elasticsearch-java</artifactId>
            <version>8.6.2</version>
        </dependency>

        <dependency>
            <groupId>org.opensearch.client</groupId>
            <artifactId>opensearch-rest-client</artifactId>
            <version>2.6.0</version>
        </dependency>
        <dependency>
            <groupId>org.opensearch.client</groupId>
            <artifactId>opensearch-java</artifactId>
            <version>2.0.0</version>
        </dependency>


        <dependency>
            <groupId>jakarta.json</groupId>
            <artifactId>jakarta.json-api</artifactId>
            <version>2.0.1</version>
        </dependency>


        <!-- spring -->
        <dependency>
            <groupId>org.springframework.cloud</groupId>
            <artifactId>spring-cloud-starter-openfeign</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-aop</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-amqp</artifactId>
            <version>2.3.1.RELEASE</version>
        </dependency>

        <!-- test dependencies -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>com.iqser.red.commons</groupId>
            <artifactId>test-commons</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.testcontainers</groupId>
            <artifactId>elasticsearch</artifactId>
            <version>1.16.3</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.opensearch</groupId>
            <artifactId>opensearch-testcontainers</artifactId>
            <version>2.0.0</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.springframework.amqp</groupId>
            <artifactId>spring-rabbit-test</artifactId>
            <version>2.3.1</version>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <annotationProcessors>
                        <annotationProcessor>lombok.launch.AnnotationProcessorHider$AnnotationProcessor</annotationProcessor>
                        <annotationProcessor>com.dslplatform.json.processor.CompiledJsonAnnotationProcessor</annotationProcessor>
                    </annotationProcessors>
                </configuration>
            </plugin>

            <plugin>
                <!-- generate git.properties for exposure in /info -->
                <groupId>pl.project13.maven</groupId>
                <artifactId>git-commit-id-plugin</artifactId>
                <executions>
                    <execution>
                        <goals>
                            <goal>revision</goal>
                        </goals>
                        <configuration>
                            <generateGitPropertiesFile>true</generateGitPropertiesFile>
                            <gitDescribe>
                                <tags>true</tags>
                            </gitDescribe>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <executions>
                    <execution>
                        <id>original-jar</id>
                        <goals>
                            <goal>jar</goal>
                        </goals>
                        <configuration>
                            <classifier>original</classifier>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

            <plugin>
                <!-- repackages the generated jar into a runnable fat-jar and makes it
                    executable -->
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
                <executions>
                    <execution>
                        <goals>
                            <goal>repackage</goal>
                        </goals>
                        <configuration>
                            <executable>true</executable>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

</project>
@@ -2,27 +2,31 @@ package com.iqser.red.service.search.v1.server;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.context.annotation.Import;

import com.iqser.red.service.search.v1.server.client.FileStatusClient;
import com.iqser.red.service.search.v1.server.multitenancy.AsyncConfig;
import com.iqser.red.service.search.v1.server.multitenancy.MultiTenancyMessagingConfiguration;
import com.iqser.red.service.search.v1.server.multitenancy.MultiTenancyWebConfiguration;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
import com.iqser.red.storage.commons.StorageAutoConfiguration;
import com.knecon.fforesight.lifecyclecommons.LifecycleAutoconfiguration;
import com.knecon.fforesight.tenantcommons.MultiTenancyAutoConfiguration;

import io.micrometer.core.aop.TimedAspect;
import io.micrometer.core.instrument.MeterRegistry;

@Import({MultiTenancyWebConfiguration.class, AsyncConfig.class, MultiTenancyMessagingConfiguration.class})
@ImportAutoConfiguration({MultiTenancyAutoConfiguration.class, LifecycleAutoconfiguration.class})
@Import({StorageAutoConfiguration.class})
@EnableFeignClients(basePackageClasses = FileStatusClient.class)
@EnableConfigurationProperties({ElasticsearchSettings.class, SearchServiceSettings.class})
@SpringBootApplication(exclude = {SecurityAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class})
@EnableAspectJAutoProxy
public class Application {

    public static void main(String[] args) {
(deleted file)

@@ -1,10 +0,0 @@
package com.iqser.red.service.search.v1.server.client;

import org.springframework.cloud.openfeign.FeignClient;

import com.iqser.red.service.persistence.service.v1.api.internal.resources.TenantsResource;

@FeignClient(name = "TenantsResource", url = "${persistence-service.url}")
public interface TenantsClient extends TenantsResource {

}
(new file)

@@ -0,0 +1,119 @@
package com.iqser.red.service.search.v1.server.configuration;

import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.BindingBuilder;
import org.springframework.amqp.core.DirectExchange;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.QueueBuilder;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import lombok.RequiredArgsConstructor;

@Configuration
@RequiredArgsConstructor
public class MessagingConfiguration {

    public static final String INDEXING_REQUEST_QUEUE_PREFIX = "indexing_request";
    public static final String INDEXING_REQUEST_EXCHANGE = "indexing_request_exchange";
    public static final String INDEXING_DLQ = "indexing_error";

    public static final String DELETE_FROM_INDEX_REQUEST_QUEUE_PREFIX = "delete_from_index_request";
    public static final String DELETE_FROM_INDEX_REQUEST_EXCHANGE = "delete_from_index_request_exchange";
    public static final String DELETE_FROM_INDEX_DLQ = "delete_from_index_error";

    public static final String X_ERROR_INFO_HEADER = "x-error-message";
    public static final String X_ERROR_INFO_TIMESTAMP_HEADER = "x-error-message-timestamp";

    @Value("${fforesight.multitenancy.tenant-delete-queue:search-service-tenant-delete}")
    private String tenantDeleteEventQueueName;
    @Value("${fforesight.multitenancy.tenant-delete-dlq:search-service-tenant-delete-error}")
    private String tenantDeleteDLQName;

    @Value("${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
    private String tenantUpdatedEventQueueName;
    @Value("${fforesight.multitenancy.tenant-updated-dlq:search-service-tenant-updated-error}")
    private String tenantUpdatedDLQName;


    @Bean
    public DirectExchange indexingRequestExchange() {

        return new DirectExchange(INDEXING_REQUEST_EXCHANGE);
    }


    @Bean
    public Queue indexingDLQ() {

        return QueueBuilder.durable(INDEXING_DLQ).build();
    }


    @Bean
    public DirectExchange deleteFromIndexRequestExchange() {

        return new DirectExchange(DELETE_FROM_INDEX_REQUEST_EXCHANGE);
    }


    @Bean
    public Queue deleteFromIndexDLQ() {

        return QueueBuilder.durable(DELETE_FROM_INDEX_DLQ).build();
    }


    @Bean
    public Binding tenantExchangeDeleteBinding(@Qualifier("tenantUserManagementTenantDeleteQueue") Queue tenantUserManagementTenantDeleteQueue,
                                               @Qualifier("tenantExchange") TopicExchange tenantExchange) {

        return BindingBuilder.bind(tenantUserManagementTenantDeleteQueue).to(tenantExchange).with("tenant.delete");
    }


    @Bean("tenantUserManagementTenantDeleteQueue")
    public Queue tenantDeleteQueue() {

        return QueueBuilder.durable(this.tenantDeleteEventQueueName)
            .withArgument("x-dead-letter-exchange", "")
            .withArgument("x-dead-letter-routing-key", this.tenantDeleteDLQName)
            .build();
    }


    @Bean
    public Queue tenantDeleteDLQ() {

        return QueueBuilder.durable(this.tenantDeleteDLQName).build();
    }


    @Bean
    public Binding tenantExchangeUpdatedBinding(@Qualifier("tenantUserManagementTenantUpdatedQueue") Queue tenantUserManagementTenantUpdatedQueue,
                                                @Qualifier("tenantExchange") TopicExchange tenantExchange) {

        return BindingBuilder.bind(tenantUserManagementTenantUpdatedQueue).to(tenantExchange).with("tenant.updated");
    }


    @Bean("tenantUserManagementTenantUpdatedQueue")
    public Queue tenantUpdatedQueue() {

        return QueueBuilder.durable(this.tenantUpdatedEventQueueName)
            .withArgument("x-dead-letter-exchange", "")
            .withArgument("x-dead-letter-routing-key", this.tenantUpdatedDLQName)
            .build();
    }


    @Bean
    public Queue tenantUpdatedDLQ() {

        return QueueBuilder.durable(this.tenantUpdatedDLQName).build();
    }

}
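MessagingConfiguration only declares exchanges, queues, bindings, and dead-letter queues; the consumers are outside this compare. A hedged sketch of what a listener on the tenant-delete queue could look like, reusing the property key and default queue name from the @Value declaration above (the package, class name, method signature, and payload type are assumptions):

```java
package com.iqser.red.service.search.v1.server.queue;

import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.stereotype.Component;

// Hypothetical consumer, not part of the diff. It listens on the queue declared in
// MessagingConfiguration; a failed message is routed to the configured dead-letter
// queue via the x-dead-letter-routing-key argument set there.
@Component
public class TenantDeleteListener {

    @RabbitListener(queues = "${fforesight.multitenancy.tenant-delete-queue:search-service-tenant-delete}")
    public void onTenantDelete(String tenantId) {
        // Index clean-up for the deleted tenant would go here.
    }
}
```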
(new file)

@@ -0,0 +1,11 @@
package com.iqser.red.service.search.v1.server.configuration;

import org.springframework.context.annotation.Configuration;

import com.knecon.fforesight.tenantcommons.queue.TenantMessagingConfiguration;

@Configuration
public class TenantMessagingConfigurationImpl extends TenantMessagingConfiguration {


}
@@ -8,11 +8,11 @@ import org.springframework.stereotype.Service;

 import com.iqser.red.service.search.v1.model.IndexMessage;
 import com.iqser.red.service.search.v1.model.IndexMessageType;
-import com.iqser.red.service.search.v1.server.client.TenantsClient;
-import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
 import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
 import com.iqser.red.service.search.v1.server.service.IndexInformationService;
 import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
+import com.knecon.fforesight.tenantcommons.TenantContext;
+import com.knecon.fforesight.tenantcommons.TenantsClient;

 import lombok.RequiredArgsConstructor;
 import lombok.extern.slf4j.Slf4j;
@@ -20,6 +20,7 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 @Service
 @RequiredArgsConstructor
+@SuppressWarnings("PMD")
 public class MigrationStarterService {

     private final ApplicationContext ctx;
@@ -45,4 +46,4 @@
        }
    }

}
}
@@ -1,6 +1,7 @@
 package com.iqser.red.service.search.v1.server.model;

-import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;
+import com.knecon.fforesight.tenantcommons.model.SearchConnection;

 import lombok.AllArgsConstructor;
 import lombok.Builder;
@@ -16,7 +16,7 @@ import lombok.NoArgsConstructor;
 @SuppressWarnings("serial")
 public class IndexSection implements Serializable {

-    private int sectionNumber;
+    private String sectionNumber;
     private String text;
     private Set<Integer> pages;
     private String headline;
@@ -14,7 +14,7 @@ import lombok.NoArgsConstructor;
 @AllArgsConstructor
 public class SectionText {

-    private int sectionNumber;
+    private String sectionNumber;
     private String headline;
     private String text;

@@ -3,14 +3,11 @@ package com.iqser.red.service.search.v1.server.model;
 import java.util.ArrayList;
 import java.util.List;

-import com.dslplatform.json.CompiledJson;
-
 import lombok.AllArgsConstructor;
 import lombok.Data;
 import lombok.NoArgsConstructor;

 @Data
-@CompiledJson
 @NoArgsConstructor
 @AllArgsConstructor
 public class Text {
(deleted file)

@@ -1,27 +0,0 @@
package com.iqser.red.service.search.v1.server.multitenancy;

import java.util.concurrent.Executor;

import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.AsyncConfigurerSupport;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
public class AsyncConfig extends AsyncConfigurerSupport {

    @Override
    public Executor getAsyncExecutor() {

        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();

        executor.setCorePoolSize(7);
        executor.setMaxPoolSize(42);
        executor.setQueueCapacity(11);
        executor.setThreadNamePrefix("TenantAwareTaskExecutor-");
        executor.setTaskDecorator(new TenantAwareTaskDecorator());
        executor.initialize();

        return executor;
    }

}
@ -1,105 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.multitenancy;
|
||||
|
||||
import java.nio.ByteBuffer;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.security.SecureRandom;
|
||||
import java.security.spec.KeySpec;
|
||||
import java.util.Base64;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.crypto.Cipher;
|
||||
import javax.crypto.SecretKey;
|
||||
import javax.crypto.SecretKeyFactory;
|
||||
import javax.crypto.spec.GCMParameterSpec;
|
||||
import javax.crypto.spec.PBEKeySpec;
|
||||
import javax.crypto.spec.SecretKeySpec;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
@Service
|
||||
public class EncryptionDecryptionService {
|
||||
|
||||
@Value("${search-service.crypto.key:redaction}")
|
||||
private String key;
|
||||
|
||||
private SecretKey secretKey;
|
||||
private byte[] iv;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@PostConstruct
|
||||
protected void postConstruct() {
|
||||
|
||||
SecureRandom secureRandom = new SecureRandom();
|
||||
iv = new byte[12];
|
||||
secureRandom.nextBytes(iv);
|
||||
secretKey = generateSecretKey(key, iv);
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public String encrypt(String strToEncrypt) {
|
||||
|
||||
return Base64.getEncoder().encodeToString(encrypt(strToEncrypt.getBytes()));
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public String decrypt(String strToDecrypt) {
|
||||
|
||||
byte[] bytes = Base64.getDecoder().decode(strToDecrypt);
|
||||
return new String(decrypt(bytes), StandardCharsets.UTF_8);
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public byte[] encrypt(byte[] data) {
|
||||
|
||||
Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
|
||||
GCMParameterSpec parameterSpec = new GCMParameterSpec(128, iv);
|
||||
cipher.init(Cipher.ENCRYPT_MODE, secretKey, parameterSpec);
|
||||
byte[] encryptedData = cipher.doFinal(data);
|
||||
ByteBuffer byteBuffer = ByteBuffer.allocate(4 + iv.length + encryptedData.length);
|
||||
byteBuffer.putInt(iv.length);
|
||||
byteBuffer.put(iv);
|
||||
byteBuffer.put(encryptedData);
|
||||
return byteBuffer.array();
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public byte[] decrypt(byte[] encryptedData) {
|
||||
|
||||
ByteBuffer byteBuffer = ByteBuffer.wrap(encryptedData);
|
||||
int noonceSize = byteBuffer.getInt();
|
||||
if (noonceSize < 12 || noonceSize >= 16) {
|
||||
throw new IllegalArgumentException("Nonce size is incorrect. Make sure that the incoming data is an AES encrypted file.");
|
||||
}
|
||||
byte[] iv = new byte[noonceSize];
|
||||
byteBuffer.get(iv);
|
||||
|
||||
SecretKey secretKey = generateSecretKey(key, iv);
|
||||
|
||||
byte[] cipherBytes = new byte[byteBuffer.remaining()];
|
||||
byteBuffer.get(cipherBytes);
|
||||
|
||||
Cipher cipher = Cipher.getInstance("AES/GCM/NoPadding");
|
||||
GCMParameterSpec parameterSpec = new GCMParameterSpec(128, iv);
|
||||
cipher.init(Cipher.DECRYPT_MODE, secretKey, parameterSpec);
|
||||
return cipher.doFinal(cipherBytes);
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public SecretKey generateSecretKey(String password, byte[] iv) {
|
||||
|
||||
KeySpec spec = new PBEKeySpec(password.toCharArray(), iv, 65536, 128); // AES-128
|
||||
SecretKeyFactory secretKeyFactory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
|
||||
byte[] key = secretKeyFactory.generateSecret(spec).getEncoded();
|
||||
return new SecretKeySpec(key, "AES");
|
||||
}
|
||||
|
||||
}
|
||||
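A minimal usage sketch for the service above (removed in this changeset in favour of the tenant-commons implementation), assuming it is injected as a regular Spring bean; the caller class and the secret value are illustrative only.

import org.springframework.stereotype.Component;

import lombok.RequiredArgsConstructor;

@Component
@RequiredArgsConstructor
public class ConnectionSecretCodec { // hypothetical caller, for illustration only

    private final EncryptionDecryptionService encryptionDecryptionService;

    public String roundTrip(String secret) {
        // encrypt(String) AES/GCM-encrypts the bytes, prefixes IV length + IV, and Base64-encodes the result;
        // decrypt(String) reverses exactly that framing and re-derives the key from the embedded IV.
        String encrypted = encryptionDecryptionService.encrypt(secret);
        return encryptionDecryptionService.decrypt(encrypted);
    }
}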
@ -1,18 +0,0 @@
package com.iqser.red.service.search.v1.server.multitenancy;

import org.springframework.stereotype.Component;

import feign.RequestInterceptor;
import feign.RequestTemplate;

@Component
public class ForwardTenantInterceptor implements RequestInterceptor {

    public static final String TENANT_HEADER_NAME = "X-TENANT-ID";

    @Override
    public void apply(RequestTemplate template) {
        // Forward the current tenant id to downstream services on every outgoing Feign request.
        template.header(TENANT_HEADER_NAME, TenantContext.getTenantId());
    }
}
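Because the interceptor above is a plain @Component, Spring Cloud OpenFeign's default configuration applies it to every Feign client in the application, assuming that is the Feign integration in use here. A sketch of a downstream client that therefore carries the X-TENANT-ID header without declaring it (the interface and endpoint are illustrative, not taken from this repository):

import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;

// Hypothetical client, for illustration only: the X-TENANT-ID header is added by ForwardTenantInterceptor,
// so the interface itself does not need to declare it.
@FeignClient(name = "tenants-service")
public interface ExampleTenantsClient {

    @GetMapping("/api/v1/tenants/{tenantId}")
    String getTenant(@PathVariable("tenantId") String tenantId);
}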
@ -1,49 +0,0 @@
package com.iqser.red.service.search.v1.server.multitenancy;

import static com.iqser.red.service.search.v1.server.multitenancy.TenantInterceptor.TENANT_HEADER_NAME;

import org.springframework.amqp.rabbit.config.AbstractRabbitListenerContainerFactory;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class MultiTenancyMessagingConfiguration {

    @Bean
    public static BeanPostProcessor multitenancyBeanPostProcessor() {

        return new BeanPostProcessor() {

            @Override
            public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {

                if (bean instanceof RabbitTemplate) {

                    ((RabbitTemplate) bean).setBeforePublishPostProcessors(m -> {
                        m.getMessageProperties().setHeader(TENANT_HEADER_NAME, TenantContext.getTenantId());
                        return m;
                    });

                } else if (bean instanceof AbstractRabbitListenerContainerFactory) {

                    ((AbstractRabbitListenerContainerFactory<?>) bean).setAfterReceivePostProcessors(m -> {
                        String tenant = m.getMessageProperties().getHeader(TENANT_HEADER_NAME);

                        if (tenant != null) {
                            TenantContext.setTenantId(tenant);
                        } else {
                            throw new RuntimeException("No tenant is set on the queue message");
                        }
                        return m;
                    });
                }
                return bean;
            }
        };
    }

}
@ -1,28 +0,0 @@
package com.iqser.red.service.search.v1.server.multitenancy;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;

import com.iqser.red.commons.spring.DefaultWebMvcConfiguration;

@Configuration
public class MultiTenancyWebConfiguration extends DefaultWebMvcConfiguration {

    private final TenantInterceptor tenantInterceptor;

    @Autowired
    public MultiTenancyWebConfiguration(TenantInterceptor tenantInterceptor) {

        this.tenantInterceptor = tenantInterceptor;
    }

    @Override
    public void addInterceptors(InterceptorRegistry registry) {

        registry.addWebRequestInterceptor(tenantInterceptor);
    }

}
@ -1,45 +0,0 @@
package com.iqser.red.service.search.v1.server.multitenancy;

import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.client.TenantsClient;
import com.iqser.red.storage.commons.model.AzureStorageConnection;
import com.iqser.red.storage.commons.model.S3StorageConnection;
import com.iqser.red.storage.commons.service.StorageConnectionProvider;

import lombok.RequiredArgsConstructor;

@Service
@RequiredArgsConstructor
public class StorageConnectionProviderImpl implements StorageConnectionProvider {

    private final TenantsClient tenantsClient;
    private final EncryptionDecryptionService encryptionDecryptionService;

    @Override
    public AzureStorageConnection getAzureStorageConnection(String tenantId) {

        var tenant = tenantsClient.getTenant(tenantId);
        return AzureStorageConnection.builder()
                .connectionString(encryptionDecryptionService.decrypt(tenant.getAzureStorageConnection().getConnectionString()))
                .containerName(tenant.getAzureStorageConnection().getContainerName())
                .build();
    }

    @Override
    public S3StorageConnection getS3StorageConnection(String tenantId) {

        var tenant = tenantsClient.getTenant(tenantId);
        return S3StorageConnection.builder()
                .key(tenant.getS3StorageConnection().getKey())
                .secret(encryptionDecryptionService.decrypt(tenant.getS3StorageConnection().getSecret()))
                .signerType(tenant.getS3StorageConnection().getSignerType())
                .bucketName(tenant.getS3StorageConnection().getBucketName())
                .region(tenant.getS3StorageConnection().getRegion())
                .endpoint(tenant.getS3StorageConnection().getEndpoint())
                .build();
    }

}
@ -1,23 +0,0 @@
package com.iqser.red.service.search.v1.server.multitenancy;

import org.springframework.core.task.TaskDecorator;
import org.springframework.lang.NonNull;

public class TenantAwareTaskDecorator implements TaskDecorator {

    @Override
    @NonNull
    public Runnable decorate(@NonNull Runnable runnable) {

        String tenantId = TenantContext.getTenantId();
        return () -> {
            try {
                TenantContext.setTenantId(tenantId);
                runnable.run();
            } finally {
                TenantContext.setTenantId(null);
            }
        };
    }

}
@ -1,29 +0,0 @@
package com.iqser.red.service.search.v1.server.multitenancy;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public final class TenantContext {

    private static InheritableThreadLocal<String> currentTenant = new InheritableThreadLocal<>();

    public static void setTenantId(String tenantId) {

        log.debug("Setting tenantId to " + tenantId);
        currentTenant.set(tenantId);
    }

    public static String getTenantId() {

        return currentTenant.get();
    }

    public static void clear() {

        currentTenant.remove();
    }

}
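A short sketch of the set/clear discipline that the web interceptor, the queue post-processors and the task decorator in this changeset all follow; the helper class is illustrative only, and the import reflects the package of the removed class above.

import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;

public final class TenantScopedRunner { // hypothetical helper, for illustration only

    private TenantScopedRunner() {
    }

    public static void runAs(String tenantId, Runnable work) {
        try {
            TenantContext.setTenantId(tenantId);
            work.run();
        } finally {
            // Always clear, otherwise the InheritableThreadLocal leaks the tenant id to the next task on this thread.
            TenantContext.clear();
        }
    }
}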
@ -1,35 +0,0 @@
package com.iqser.red.service.search.v1.server.multitenancy;

import org.springframework.stereotype.Component;
import org.springframework.ui.ModelMap;
import org.springframework.web.context.request.WebRequest;
import org.springframework.web.context.request.WebRequestInterceptor;

@Component
public class TenantInterceptor implements WebRequestInterceptor {

    public static final String TENANT_HEADER_NAME = "X-TENANT-ID";

    @Override
    public void preHandle(WebRequest request) {

        if (request.getHeader(TENANT_HEADER_NAME) != null) {
            TenantContext.setTenantId(request.getHeader(TENANT_HEADER_NAME));
        }
    }

    @Override
    public void postHandle(WebRequest request, ModelMap model) {

        TenantContext.clear();
    }

    @Override
    public void afterCompletion(WebRequest request, Exception ex) {

    }

}
@ -0,0 +1,38 @@
package com.iqser.red.service.search.v1.server.queue;

import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.knecon.fforesight.tenantcommons.model.TenantResponse;

import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
public class DeleteTenantMessageReceiver {

    private final IndexDeleteService indexDeleteService;

    @Value("${fforesight.multitenancy.tenant-delete-queue:tenant-delete-queue}")
    private String tenantDeleteQueue;

    @PostConstruct
    public void postConstruct() {

        log.info("Listener for tenant-delete started for queue: {}", this.tenantDeleteQueue);
    }

    @RabbitListener(queues = "${fforesight.multitenancy.tenant-delete-queue:tenant-delete-queue}")
    public void deleteTenant(TenantResponse tenant) {

        indexDeleteService.dropIndex(tenant.getSearchConnection());
    }

}
@ -1,10 +1,14 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
|
||||
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_QUEUE;
|
||||
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_DQL;
|
||||
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_QUEUE;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_DLQ;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_REQUEST_EXCHANGE;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.X_ERROR_INFO_HEADER;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.X_ERROR_INFO_TIMESTAMP_HEADER;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.amqp.AmqpRejectAndDontRequeueException;
|
||||
@ -14,9 +18,9 @@ import org.springframework.amqp.rabbit.annotation.RabbitListener;
|
||||
import org.springframework.amqp.rabbit.core.RabbitTemplate;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileErrorInfo;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
|
||||
import com.iqser.red.service.search.v1.model.IndexMessage;
|
||||
import com.iqser.red.service.search.v1.model.IndexMessageType;
|
||||
@ -31,6 +35,7 @@ import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
|
||||
import com.iqser.red.service.search.v1.server.service.TextStorageService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
@ -41,6 +46,9 @@ import lombok.extern.slf4j.Slf4j;
|
||||
@RequiredArgsConstructor
|
||||
public class IndexingMessageReceiver {
|
||||
|
||||
public static final String INDEXING_LISTENER_ID = "indexing-listener";
|
||||
public static final String DELETE_FROM_INDEX_LISTENER_ID = "delete-from-index-listener";
|
||||
|
||||
private final ObjectMapper objectMapper;
|
||||
private final TextStorageService textStorageService;
|
||||
private final FileStatusClient fileStatusClient;
|
||||
@ -58,7 +66,7 @@ public class IndexingMessageReceiver {
|
||||
|
||||
@SneakyThrows
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = INDEXING_QUEUE)
|
||||
@RabbitListener(id = INDEXING_LISTENER_ID)
|
||||
public void receiveIndexingRequest(Message message) {
|
||||
|
||||
var indexRequest = objectMapper.readValue(message.getBody(), IndexMessage.class);
|
||||
@ -66,11 +74,18 @@ public class IndexingMessageReceiver {
|
||||
// This prevents endless retries and OOM errors.
|
||||
if (message.getMessageProperties().isRedelivered()) {
|
||||
throw new AmqpRejectAndDontRequeueException(String.format("Error during last processing of request with dossierId: %s and fileId: %s, do not retry.",
|
||||
indexRequest.getDossierId(),
|
||||
indexRequest.getFileId()));
|
||||
indexRequest.getDossierId(),
|
||||
indexRequest.getFileId()));
|
||||
}
|
||||
|
||||
receiveIndexingRequest(indexRequest);
|
||||
try {
|
||||
receiveIndexingRequest(indexRequest);
|
||||
} catch (Exception e) {
|
||||
log.warn("An exception occurred in processing the indexing request stage: ", e);
|
||||
message.getMessageProperties().getHeaders().put(X_ERROR_INFO_HEADER, e.getMessage());
|
||||
message.getMessageProperties().getHeaders().put(X_ERROR_INFO_TIMESTAMP_HEADER, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -78,10 +93,12 @@ public class IndexingMessageReceiver {
|
||||
|
||||
log.info("Processing indexing request: {}", indexRequest);
|
||||
|
||||
FileModel fileStatus;
|
||||
Dossier dossier;
|
||||
switch (indexRequest.getMessageType()) {
|
||||
case INSERT:
|
||||
var fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
var dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
|
||||
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
|
||||
indexFile(dossier, fileStatus);
|
||||
break;
|
||||
|
||||
@ -89,24 +106,23 @@ public class IndexingMessageReceiver {
|
||||
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
|
||||
|
||||
var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
|
||||
dossier.getSoftDeletedTime() != null,
|
||||
dossier.getArchivedTime() != null,
|
||||
fileStatus.getWorkflowStatus().name(),
|
||||
fileStatus.getFileAttributes());
|
||||
if(documentUpdateService.documentExists(indexRequest.getFileId())) {var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
|
||||
dossier.getSoftDeletedTime() != null,
|
||||
dossier.getArchivedTime() != null,
|
||||
fileStatus.getWorkflowStatus().name(),
|
||||
fileStatus.getFileAttributes());
|
||||
|
||||
documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
|
||||
log.info("Successfully updated {}", indexRequest);
|
||||
documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
|
||||
log.info("Successfully updated {}", indexRequest);
|
||||
} else {
|
||||
indexFile(dossier, fileStatus);
|
||||
}
|
||||
break;
|
||||
|
||||
case DROP:
|
||||
indexDeleteService.recreateIndex();
|
||||
addAllDocumentsToIndexQueue();
|
||||
try {
|
||||
indexInformationService.updateIndexInformation();
|
||||
} catch (Exception e) {
|
||||
log.error("Could not update index information", e);
|
||||
}
|
||||
indexInformationService.updateIndexInformation();
|
||||
break;
|
||||
|
||||
default:
|
||||
@ -117,34 +133,56 @@ public class IndexingMessageReceiver {
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = INDEXING_DQL)
|
||||
public void receiveIndexingRequestDQL(String in) throws JsonProcessingException {
|
||||
@RabbitListener(queues = INDEXING_DLQ)
|
||||
public void receiveIndexingRequestDQL(Message in) throws IOException {
|
||||
|
||||
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
|
||||
String errorLog = "Failed to process indexing request:";
|
||||
log.info(errorLog + ": {}", indexRequest);
|
||||
String errorMessage = errorLog + in.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
|
||||
OffsetDateTime timestamp = in.getMessageProperties().getHeader(X_ERROR_INFO_TIMESTAMP_HEADER);
|
||||
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
|
||||
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(),
|
||||
indexRequest.getFileId(),
|
||||
new FileErrorInfo(errorMessage, INDEXING_DLQ, "search-service", timestamp));
|
||||
|
||||
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
|
||||
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
log.info("Failed to process indexing request: {}", indexRequest);
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = DELETE_FROM_INDEX_QUEUE)
|
||||
public void receiveDeleteDocumentRequest(String in) throws JsonProcessingException {
|
||||
@RabbitListener(id = DELETE_FROM_INDEX_LISTENER_ID)
|
||||
public void receiveDeleteDocumentRequest(Message in) throws IOException {
|
||||
|
||||
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
|
||||
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
|
||||
log.info("Processing delete document request: {}", indexRequest);
|
||||
documentDeleteService.deleteDocument(indexRequest.getFileId());
|
||||
log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
try {
|
||||
documentDeleteService.deleteDocument(indexRequest.getFileId());
|
||||
log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
} catch (Exception e) {
|
||||
log.warn("An exception occurred in processing delete document stage: {}", e.getMessage());
|
||||
in.getMessageProperties().getHeaders().put(X_ERROR_INFO_HEADER, e.getMessage());
|
||||
in.getMessageProperties().getHeaders().put(X_ERROR_INFO_TIMESTAMP_HEADER, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = DELETE_FROM_INDEX_DLQ)
|
||||
public void receiveDeleteDocumentRequestDLQ(String in) throws JsonProcessingException {
|
||||
public void receiveDeleteDocumentRequestDLQ(Message in) throws IOException {
|
||||
|
||||
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
|
||||
String errorLog = "Failed to process delete from index request ";
|
||||
log.info(errorLog + ": {}", indexRequest);
|
||||
String errorMessage = errorLog + in.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
|
||||
OffsetDateTime timestamp = in.getMessageProperties().getHeader(X_ERROR_INFO_TIMESTAMP_HEADER);
|
||||
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
|
||||
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(),
|
||||
indexRequest.getFileId(),
|
||||
new FileErrorInfo(errorMessage, INDEXING_DLQ, "search-service", timestamp));
|
||||
|
||||
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
|
||||
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
log.info("Failed to process delete from index request: {}", indexRequest);
|
||||
}
|
||||
|
||||
|
||||
@ -154,15 +192,15 @@ public class IndexingMessageReceiver {
|
||||
Text text = textStorageService.getText(dossier.getId(), file.getId());
|
||||
|
||||
var indexDocument = indexDocumentConverterService.convert(dossier.getDossierTemplateId(),
|
||||
dossier.getId(),
|
||||
file.getId(),
|
||||
file.getFilename(),
|
||||
text,
|
||||
file.getAssignee(),
|
||||
dossier.getSoftDeletedTime() != null,
|
||||
dossier.getArchivedTime() != null,
|
||||
file.getWorkflowStatus(),
|
||||
file.getFileAttributes());
|
||||
dossier.getId(),
|
||||
file.getId(),
|
||||
file.getFilename(),
|
||||
text,
|
||||
file.getAssignee(),
|
||||
dossier.getSoftDeletedTime() != null,
|
||||
dossier.getArchivedTime() != null,
|
||||
file.getWorkflowStatus(),
|
||||
file.getFileAttributes());
|
||||
|
||||
documentIndexService.indexDocument(indexDocument);
|
||||
fileStatusProcessingUpdateClient.indexingSuccessful(dossier.getId(), file.getId());
|
||||
@ -184,17 +222,14 @@ public class IndexingMessageReceiver {
|
||||
private void addFilesToIndexingQueue(String dossierId, List<FileModel> files) {
|
||||
|
||||
for (FileModel file : files) {
|
||||
try {
|
||||
log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
|
||||
rabbitTemplate.convertAndSend(INDEXING_QUEUE,
|
||||
objectMapper.writeValueAsString(IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build()),
|
||||
message -> {
|
||||
message.getMessageProperties().setPriority(99);
|
||||
return message;
|
||||
});
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
|
||||
rabbitTemplate.convertAndSend(INDEXING_REQUEST_EXCHANGE,
|
||||
TenantContext.getTenantId(),
|
||||
IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build(),
|
||||
message -> {
|
||||
message.getMessageProperties().setPriority(99);
|
||||
return message;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
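For reference, a minimal producer-side sketch modelled on addFilesToIndexingQueue above: after this change a message is published to the per-tenant indexing exchange with the tenant id as routing key. The publisher class is illustrative; the exchange constant and the IndexMessage builder calls are the ones used in the diff.

import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_REQUEST_EXCHANGE;

import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.stereotype.Component;

import com.iqser.red.service.search.v1.model.IndexMessage;
import com.iqser.red.service.search.v1.model.IndexMessageType;
import com.knecon.fforesight.tenantcommons.TenantContext;

import lombok.RequiredArgsConstructor;

@Component
@RequiredArgsConstructor
public class IndexingRequestPublisher { // hypothetical producer, for illustration only

    private final RabbitTemplate rabbitTemplate;

    public void requestIndexing(String dossierId, String fileId) {
        // Routing key = tenant id, so the message lands on that tenant's indexing queue bound to the exchange.
        rabbitTemplate.convertAndSend(INDEXING_REQUEST_EXCHANGE,
                TenantContext.getTenantId(),
                IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(fileId).build());
    }
}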
@ -1,52 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import org.springframework.amqp.core.Queue;
|
||||
import org.springframework.amqp.core.QueueBuilder;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@Configuration
|
||||
@RequiredArgsConstructor
|
||||
public class MessagingConfiguration {
|
||||
|
||||
public static final String INDEXING_QUEUE = "indexingQueue";
|
||||
public static final String INDEXING_DQL = "indexingDQL";
|
||||
|
||||
public static final String DELETE_FROM_INDEX_QUEUE = "deleteFromIndexQueue";
|
||||
public static final String DELETE_FROM_INDEX_DLQ = "deleteFromIndexDLQ";
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue indexingQueue() {
|
||||
|
||||
return QueueBuilder.durable(INDEXING_QUEUE).withArgument("x-dead-letter-exchange", "").withArgument("x-dead-letter-routing-key", INDEXING_DQL).maxPriority(2).build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue indexingDeadLetterQueue() {
|
||||
|
||||
return QueueBuilder.durable(INDEXING_DQL).build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue deleteFromIndexQueue() {
|
||||
|
||||
return QueueBuilder.durable(DELETE_FROM_INDEX_QUEUE)
|
||||
.withArgument("x-dead-letter-exchange", "")
|
||||
.withArgument("x-dead-letter-routing-key", DELETE_FROM_INDEX_DLQ)
|
||||
.maxPriority(2)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue deleteFromIndexDLQ() {
|
||||
|
||||
return QueueBuilder.durable(DELETE_FROM_INDEX_DLQ).build();
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,74 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.*;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitListener;
|
||||
import org.springframework.boot.context.event.ApplicationReadyEvent;
|
||||
import org.springframework.context.event.EventListener;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.knecon.fforesight.tenantcommons.TenantProvider;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantCreatedEvent;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantQueueConfiguration;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
import com.knecon.fforesight.tenantcommons.queue.RabbitQueueFromExchangeService;
|
||||
import com.knecon.fforesight.tenantcommons.queue.TenantExchangeMessageReceiver;
|
||||
|
||||
@Service
|
||||
public class TenantExchangeMessageReceiverImpl extends TenantExchangeMessageReceiver {
|
||||
|
||||
public TenantExchangeMessageReceiverImpl(RabbitQueueFromExchangeService rabbitQueueService, TenantProvider tenantProvider) {
|
||||
|
||||
super(rabbitQueueService, tenantProvider);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected Set<TenantQueueConfiguration> getTenantQueueConfigs() {
|
||||
|
||||
return Set.of(TenantQueueConfiguration.builder()
|
||||
.listenerId(IndexingMessageReceiver.INDEXING_LISTENER_ID)
|
||||
.exchangeName(INDEXING_REQUEST_EXCHANGE)
|
||||
.queuePrefix(INDEXING_REQUEST_QUEUE_PREFIX)
|
||||
.dlqName(INDEXING_DLQ)
|
||||
.arguments(Map.of("x-max-priority", 2))
|
||||
.build(),
|
||||
TenantQueueConfiguration.builder()
|
||||
.listenerId(IndexingMessageReceiver.DELETE_FROM_INDEX_LISTENER_ID)
|
||||
.exchangeName(DELETE_FROM_INDEX_REQUEST_EXCHANGE)
|
||||
.queuePrefix(DELETE_FROM_INDEX_REQUEST_QUEUE_PREFIX)
|
||||
.dlqName(DELETE_FROM_INDEX_DLQ)
|
||||
.arguments(Map.of("x-max-priority", 2))
|
||||
.build());
|
||||
}
|
||||
|
||||
|
||||
@EventListener(ApplicationReadyEvent.class)
|
||||
public void onApplicationReady() {
|
||||
|
||||
System.out.println("application ready invoked");
|
||||
super.initializeQueues();
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = "#{tenantMessagingConfigurationImpl.getTenantCreatedQueueName()}")
|
||||
public void reactToTenantCreation(TenantCreatedEvent tenantCreatedEvent) {
|
||||
|
||||
super.reactToTenantCreation(tenantCreatedEvent);
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = "#{tenantMessagingConfigurationImpl.getTenantDeletedQueueName()}")
|
||||
public void reactToTenantDeletion(TenantResponse tenantResponse) {
|
||||
|
||||
super.reactToTenantDeletion(tenantResponse);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,48 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitListener;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class UpdatedTenantMessageReceiver {
|
||||
|
||||
private final IndexQueryService indexQueryService;
|
||||
private final IndexDeleteService indexDeleteService;
|
||||
|
||||
@Value("${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
|
||||
private String tenantUpdatedQueue;
|
||||
|
||||
|
||||
@PostConstruct
|
||||
public void postConstruct() {
|
||||
|
||||
log.info("Listener for tenant updated events started for queue: {}", this.tenantUpdatedQueue);
|
||||
}
|
||||
|
||||
|
||||
@RabbitListener(queues = "${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
|
||||
public void updateTenant(TenantResponse tenant) {
|
||||
|
||||
String numberOfReplicas = tenant.getSearchConnection().getNumberOfReplicas();
|
||||
String numberOfShards = tenant.getSearchConnection().getNumberOfShards();
|
||||
IndexQueryResult queryResult = indexQueryService.getIndexQueryResult(tenant.getSearchConnection());
|
||||
|
||||
if (queryResult.isIndexFound() && (!numberOfReplicas.equals(queryResult.getNumberOfReplicas()) || !numberOfShards.equals(queryResult.getNumberOfShards()))) {
|
||||
log.info("Number of shards or replicas were changed during tenant update, indices will be recreated");
|
||||
indexDeleteService.recreateIndex(tenant.getSearchConnection());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -5,5 +5,6 @@ import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
public interface DocumentUpdateService {

    void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate);
    boolean documentExists(String fileId);

}
@ -1,13 +1,19 @@
package com.iqser.red.service.search.v1.server.service;

import com.knecon.fforesight.tenantcommons.model.SearchConnection;

public interface IndexDeleteService {

    void recreateIndex();

    void recreateIndex(SearchConnection searchConnection);


    void closeIndex();


    void dropIndex();

    void dropIndex(SearchConnection searchConnection);

}
@ -68,13 +68,13 @@ public class IndexInformationService {
byte[] buffer = new byte[8192];
int count;
MessageDigest digest = MessageDigest.getInstance("SHA-256");
BufferedInputStream bis = new BufferedInputStream(new ClassPathResource(PATH_TO_CONFIG).getInputStream());
while ((count = bis.read(buffer)) > 0) {
digest.update(buffer, 0, count);
}
bis.close();
try (BufferedInputStream bis = new BufferedInputStream(new ClassPathResource(PATH_TO_CONFIG).getInputStream())) {
while ((count = bis.read(buffer)) > 0) {
digest.update(buffer, 0, count);
}

return Arrays.toString(digest.digest());
return Arrays.toString(digest.digest());
}
}

}
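One remark on the checksum above: Arrays.toString(digest.digest()) yields a bracketed list of signed bytes such as "[58, 123, ...]", which works as a fingerprint but is unusual. If the build targets Java 17 or newer, a hex digest is the more conventional form; a sketch of that alternative, offered purely as a suggestion and not as part of the change:

import java.security.MessageDigest;
import java.util.HexFormat;

final class DigestFormats { // illustrative helper, not part of this diff

    private DigestFormats() {
    }

    // Produces "3a7bd3e2..." instead of the "[58, 123, ...]" form returned by Arrays.toString(digest.digest()).
    static String toHex(MessageDigest digest) {
        return HexFormat.of().formatHex(digest.digest());
    }
}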
@ -0,0 +1,17 @@
package com.iqser.red.service.search.v1.server.service;

import lombok.AccessLevel;
import lombok.Builder;
import lombok.Data;
import lombok.experimental.FieldDefaults;

@Data
@Builder
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class IndexQueryResult {

    boolean indexFound;
    String numberOfShards;
    String numberOfReplicas;

}
@ -0,0 +1,9 @@
package com.iqser.red.service.search.v1.server.service;

import com.knecon.fforesight.tenantcommons.model.SearchConnection;

public interface IndexQueryService {

    IndexQueryResult getIndexQueryResult(SearchConnection searchConnection);

}
@ -4,9 +4,9 @@ import org.springframework.stereotype.Service;

import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
import com.iqser.red.service.search.v1.server.model.Text;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
import com.iqser.red.storage.commons.service.StorageService;
import com.knecon.fforesight.tenantcommons.TenantContext;

import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
@ -6,9 +6,9 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch._types.Refresh;
|
||||
@ -26,11 +26,15 @@ public class DocumentDeleteServiceImpl implements DocumentDeleteService {
|
||||
|
||||
public void deleteDocument(String fileId) {
|
||||
|
||||
DeleteRequest request = new DeleteRequest.Builder().index(TenantContext.getTenantId()).id(fileId).refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())).build();
|
||||
DeleteRequest request = new DeleteRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(fileId)
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
|
||||
.build();
|
||||
|
||||
try {
|
||||
clientCache.getClient().delete(request);
|
||||
} catch (IOException | ElasticsearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentDeleteError(fileId, e);
|
||||
}
|
||||
}
|
||||
|
||||
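The new code above resolves the target index via IndexNameHelper.getSearchIndex(indexPrefix), a helper that is not shown in this diff. A plausible shape, stated purely as an assumption so the surrounding calls are easier to read:

// Sketch only: the real IndexNameHelper is not part of this changeset, and its naming convention may differ.
public final class IndexNameHelper {

    private static final String SEARCH_INDEX_SUFFIX = "search"; // assumed suffix

    private IndexNameHelper() {
    }

    public static String getSearchIndex(String indexPrefix) {
        // Assumed convention: tenant-specific prefix plus a fixed suffix, e.g. "acme-search".
        return indexPrefix + "-" + SEARCH_INDEX_SUFFIX;
    }
}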
@ -7,9 +7,9 @@ import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch._types.Refresh;
|
||||
@ -32,13 +32,14 @@ public class DocumentIndexServiceImpl implements DocumentIndexService {
|
||||
|
||||
try {
|
||||
clientCache.getClient()
|
||||
.index(i -> i.index(TenantContext.getTenantId())
|
||||
.index(i -> i.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(indexDocument.getFileId())
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
|
||||
.document(indexDocument));
|
||||
} catch (IOException | ElasticsearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentIndexError(indexDocument.getFileId(), e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -7,9 +7,9 @@ import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch._types.Refresh;
|
||||
@ -32,11 +32,22 @@ public class DocumentUpdateServiceImpl implements DocumentUpdateService {
|
||||
|
||||
try {
|
||||
clientCache.getClient()
|
||||
.update(u -> u.index(TenantContext.getTenantId()).id(fileId).doc(indexDocumentUpdate).refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())),
|
||||
IndexDocumentUpdate.class);
|
||||
.update(u -> u.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(fileId)
|
||||
.doc(indexDocumentUpdate)
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())), IndexDocumentUpdate.class);
|
||||
} catch (IOException | ElasticsearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentUpdateError(fileId, e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
@Timed("redactmanager_documentExists")
|
||||
public boolean documentExists(String fileId) {
|
||||
|
||||
return clientCache.getClient().exists(e -> e.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix())).id(fileId)).value();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -2,6 +2,7 @@ package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import lombok.SneakyThrows;
|
||||
import org.apache.http.HttpHost;
|
||||
import org.apache.http.auth.AuthScope;
|
||||
import org.apache.http.auth.UsernamePasswordCredentials;
|
||||
@ -10,7 +11,7 @@ import org.apache.http.impl.client.BasicCredentialsProvider;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
import org.elasticsearch.client.RestClientBuilder;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import co.elastic.clients.elasticsearch.ElasticsearchClient;
|
||||
import co.elastic.clients.json.jackson.JacksonJsonpMapper;
|
||||
@ -20,10 +21,11 @@ import lombok.Data;
|
||||
import lombok.experimental.Delegate;
|
||||
|
||||
@Data
|
||||
@SuppressWarnings("PMD")
|
||||
public class EsClient {
|
||||
|
||||
// Lower timeouts should be set per request.
|
||||
private static final int ABSURD_HIGH_TIMEOUT = 90_000_000;
|
||||
private static final int ABSURD_HIGH_TIMEOUT = 600_000;
|
||||
|
||||
private SearchConnection searchConnection;
|
||||
|
||||
@ -36,11 +38,12 @@ public class EsClient {
|
||||
HttpHost[] httpHost = searchConnection.getHosts()
|
||||
.stream()
|
||||
.map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
|
||||
.collect(Collectors.toList())
|
||||
.toList()
|
||||
.toArray(new HttpHost[searchConnection.getHosts().size()]);
|
||||
|
||||
RestClientBuilder builder = RestClient.builder(httpHost)
|
||||
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT).setSocketTimeout(ABSURD_HIGH_TIMEOUT));
|
||||
var builder = RestClient.builder(httpHost)
|
||||
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT)
|
||||
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));
|
||||
|
||||
if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
|
||||
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
|
||||
@ -54,4 +57,10 @@ public class EsClient {
|
||||
this.elasticsearchClient = new ElasticsearchClient(transport);
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public void terminate() {
|
||||
|
||||
elasticsearchClient._transport().close();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -1,9 +1,6 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
@ -13,11 +10,11 @@ import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.cache.CacheLoader;
|
||||
import com.google.common.cache.LoadingCache;
|
||||
import com.google.common.cache.RemovalListener;
|
||||
import com.iqser.red.service.search.v1.server.client.TenantsClient;
|
||||
import com.iqser.red.service.search.v1.server.model.Connection;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.EncryptionDecryptionService;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.EncryptionDecryptionService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.TenantsClient;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
@ -38,37 +35,57 @@ public class EsClientCache {
|
||||
@Value("${multitenancy.client-cache.expireAfterAccess:10}")
|
||||
private Integer expireAfterAccess;
|
||||
|
||||
private LoadingCache<String, Connection> connections;
|
||||
private LoadingCache<Connection, EsClient> clients;
|
||||
private LoadingCache<String, EsClient> clients;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void isClientAliveOrTerminate() {
|
||||
|
||||
try {
|
||||
var client = clients.get(TenantContext.getTenantId());
|
||||
try {
|
||||
|
||||
log.info("Checking if client is still alive: {}", client.info());
|
||||
} catch (Exception e) {
|
||||
|
||||
try {
|
||||
client.terminate();
|
||||
} catch (Exception e2) {
|
||||
|
||||
log.info("Failed to terminate ES Client");
|
||||
clients.invalidate(TenantContext.getTenantId());
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
log.error("Failed to terminate/invalidate client", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@PostConstruct
|
||||
protected void createCache() {
|
||||
|
||||
connections = CacheBuilder.newBuilder().maximumSize(maximumSize).expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES).build(new CacheLoader<>() {
|
||||
public Connection load(String tenantId) {
|
||||
|
||||
var tenant = tenantsClient.getTenant(tenantId);
|
||||
var hostsAsString = tenant.getSearchConnection().getHosts().stream().collect(Collectors.joining());
|
||||
return Connection.builder().hosts(hostsAsString).searchConnection(tenant.getSearchConnection()).build();
|
||||
}
|
||||
});
|
||||
|
||||
clients = CacheBuilder.newBuilder()
|
||||
.maximumSize(maximumSize)
|
||||
.expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
|
||||
.removalListener((RemovalListener<Connection, EsClient>) removal -> {
|
||||
removal.getValue().shutdown();
|
||||
log.info("Closed elasticsearch client for tenant {}", removal.getKey().getHosts());
|
||||
.removalListener((RemovalListener<String, EsClient>) removal -> {
|
||||
try {
|
||||
removal.getValue().terminate();
|
||||
log.info("Closed elasticsearch client for tenant {}", removal.getKey());
|
||||
} catch (Exception e) {
|
||||
log.info("Failed to close elasticsearch client for tenant {}", removal.getKey());
|
||||
}
|
||||
})
|
||||
.build(new CacheLoader<>() {
|
||||
public EsClient load(Connection key) {
|
||||
public EsClient load(String tenantId) {
|
||||
|
||||
if (key.getSearchConnection().getPassword() != null) {
|
||||
key.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(key.getSearchConnection().getPassword()));
|
||||
var tenant = tenantsClient.getTenant(tenantId);
|
||||
|
||||
if (tenant.getSearchConnection().getPassword() != null) {
|
||||
tenant.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(tenant.getSearchConnection().getPassword()));
|
||||
}
|
||||
var client = new EsClient(key.getSearchConnection());
|
||||
log.info("Initialized elasticsearch client for tenant {}", key);
|
||||
var client = new EsClient(tenant.getSearchConnection());
|
||||
log.info("Initialized elasticsearch client for tenant {}", tenantId);
|
||||
indexCreatorService.createIndex(client);
|
||||
return client;
|
||||
}
|
||||
@ -79,8 +96,7 @@ public class EsClientCache {
|
||||
@SneakyThrows
|
||||
public EsClient getClient() {
|
||||
|
||||
var connection = connections.get(TenantContext.getTenantId());
|
||||
return clients.get(connection);
|
||||
return clients.get(TenantContext.getTenantId());
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -9,8 +9,8 @@ import org.springframework.core.io.ResourceLoader;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.mapping.TypeMapping;
|
||||
import co.elastic.clients.elasticsearch.indices.IndexSettings;
|
||||
@ -23,6 +23,7 @@ import lombok.extern.slf4j.Slf4j;
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
|
||||
@SuppressWarnings("PMD")
|
||||
public class IndexCreatorServiceImpl {
|
||||
|
||||
private final ElasticsearchSettings settings;
|
||||
@ -32,7 +33,10 @@ public class IndexCreatorServiceImpl {
|
||||
|
||||
if (!indexExists(esClient)) {
|
||||
try {
|
||||
var response = esClient.indices().create(i -> i.index(TenantContext.getTenantId()).settings(createIndexSettings(esClient)).mappings(createIndexMapping()));
|
||||
var response = esClient.indices()
|
||||
.create(i -> i.index(IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix()))
|
||||
.settings(createIndexSettings(esClient))
|
||||
.mappings(createIndexMapping()));
|
||||
log.info("Successfully created index: {}", response.index());
|
||||
} catch (IOException e) {
|
||||
log.error("Failed to create index.", e);
|
||||
@ -44,7 +48,7 @@ public class IndexCreatorServiceImpl {
|
||||
private boolean indexExists(EsClient esClient) {
|
||||
|
||||
try {
|
||||
var response = esClient.indices().exists(i -> i.index(TenantContext.getTenantId()));
|
||||
var response = esClient.indices().exists(i -> i.index(IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix())));
|
||||
return response.value();
|
||||
} catch (IOException e) {
|
||||
throw IndexException.indexExists(e);
|
||||
@ -77,4 +81,4 @@ public class IndexCreatorServiceImpl {
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -4,8 +4,9 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
@ -29,10 +30,45 @@ public class IndexDeleteServiceImpl implements IndexDeleteService {
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void recreateIndex(SearchConnection searchConnection) {
|
||||
|
||||
var client = new EsClient(searchConnection);
|
||||
closeIndex(client, searchConnection.getIndexPrefix());
|
||||
dropIndex(client, searchConnection.getIndexPrefix());
|
||||
indexCreatorService.createIndex(client);
|
||||
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void closeIndex() {
|
||||
|
||||
var closeIndexResponse = clientCache.getClient().indices().close(i -> i.index(TenantContext.getTenantId()).timeout(t -> t.time("2m")));
|
||||
closeIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
|
||||
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void dropIndex() {
|
||||
|
||||
dropIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
|
||||
}
|
||||
|
||||
|
||||
public void dropIndex(SearchConnection searchConnection) {
|
||||
|
||||
var client = new EsClient(searchConnection);
|
||||
closeIndex(client, searchConnection.getIndexPrefix());
|
||||
dropIndex(client, searchConnection.getIndexPrefix());
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private void closeIndex(EsClient client, String indexPrefix) {
|
||||
|
||||
var closeIndexResponse = client.indices()
|
||||
.close(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
|
||||
if (closeIndexResponse.acknowledged()) {
|
||||
log.info("Index is closed");
|
||||
} else {
|
||||
@ -42,10 +78,11 @@ public class IndexDeleteServiceImpl implements IndexDeleteService {
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void dropIndex() {
|
||||
private void dropIndex(EsClient client, String indexPrefix) {
|
||||
|
||||
log.info("Will drop index");
|
||||
var deleteIndexResponse = clientCache.getClient().indices().delete(i -> i.index(TenantContext.getTenantId()).timeout(t -> t.time("2m")));
|
||||
var deleteIndexResponse = client.indices()
|
||||
.delete(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
|
||||
|
||||
if (deleteIndexResponse.acknowledged()) {
|
||||
log.info("Index is dropped");
|
||||
@ -54,4 +91,4 @@ public class IndexDeleteServiceImpl implements IndexDeleteService {
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -0,0 +1,70 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch.indices.GetIndicesSettingsResponse;
|
||||
import co.elastic.clients.elasticsearch.indices.IndexState;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
|
||||
@SuppressWarnings("PMD")
|
||||
public class IndexQueryServiceImpl implements IndexQueryService {
|
||||
|
||||
@SneakyThrows
|
||||
public IndexQueryResult getIndexQueryResult(SearchConnection searchConnection) {
|
||||
|
||||
IndexQueryResult.IndexQueryResultBuilder builder = IndexQueryResult.builder();
|
||||
|
||||
getIndexState(searchConnection).ifPresent(indexState -> {
|
||||
builder.indexFound(true);
|
||||
|
||||
var indexSettings = indexState.settings();
|
||||
if (indexSettings != null) {
|
||||
|
||||
String replicas = indexSettings.numberOfReplicas();
|
||||
String shards = indexSettings.numberOfShards();
|
||||
|
||||
if (indexSettings.index() != null) {
|
||||
|
||||
if (replicas == null) {
|
||||
replicas = indexSettings.index().numberOfReplicas();
|
||||
}
|
||||
if (shards == null) {
|
||||
shards = indexSettings.index().numberOfShards();
|
||||
}
|
||||
}
|
||||
builder.numberOfReplicas(replicas).numberOfShards(shards);
|
||||
}
|
||||
});
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private Optional<IndexState> getIndexState(SearchConnection searchConnection) {
|
||||
|
||||
var esClient = new EsClient(searchConnection);
|
||||
var indexName = IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix());
|
||||
try {
|
||||
GetIndicesSettingsResponse settings = esClient.indices().getSettings(i -> i.index(indexName));
|
||||
return Optional.ofNullable(settings.get(indexName));
|
||||
} catch (ElasticsearchException elasticsearchException) {
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -22,6 +22,7 @@ import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.Query;
|
||||
import com.iqser.red.service.search.v1.server.service.SearchService;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.FieldValue;
|
||||
@ -70,7 +71,8 @@ public class SearchServiceImpl implements SearchService {
|
||||
highlightFieldMap.put("filename", new HighlightField.Builder().build());
|
||||
highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());
|
||||
|
||||
SearchRequest request = new SearchRequest.Builder().query(convertQuery(query,
|
||||
SearchRequest request = new SearchRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.query(convertQuery(query,
|
||||
dossierTemplateIds,
|
||||
dossierIds,
|
||||
fileId,
|
||||
@ -107,6 +109,7 @@ public class SearchServiceImpl implements SearchService {
|
||||
try {
|
||||
return clientCache.getClient().search(searchRequest, IndexDocument.class);
|
||||
} catch (IOException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.searchFailed(e);
|
||||
}
|
||||
}
|
||||
@ -307,7 +310,7 @@ public class SearchServiceImpl implements SearchService {
|
||||
|
||||
return MatchedSection.builder()
|
||||
.headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
|
||||
.sectionNumber(indexSection.getInt("sectionNumber"))
|
||||
.sectionNumber(indexSection.getString("sectionNumber"))
|
||||
.pages(pages)
|
||||
.matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
|
||||
.build();
|
||||
@ -331,4 +334,4 @@ public class SearchServiceImpl implements SearchService {
|
||||
return page;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@ -9,9 +9,9 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@ -26,11 +26,15 @@ public class DocumentDeleteServiceImpl implements DocumentDeleteService {
|
||||
|
||||
public void deleteDocument(String fileId) {
|
||||
|
||||
DeleteRequest request = new DeleteRequest.Builder().index(TenantContext.getTenantId()).id(fileId).refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())).build();
|
||||
DeleteRequest request = new DeleteRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(fileId)
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
|
||||
.build();
|
||||
|
||||
try {
|
||||
clientCache.getClient().delete(request);
|
||||
} catch (IOException | OpenSearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentDeleteError(fileId, e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -9,9 +9,9 @@ import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;

import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
@ -31,11 +31,12 @@ public class DocumentIndexServiceImpl implements DocumentIndexService {
public void indexDocument(IndexDocument indexDocument) {

try {
clientCache.getClient().index(i -> i.index(TenantContext.getTenantId())
clientCache.getClient().index(i -> i.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(indexDocument.getFileId())
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
.document(indexDocument));
} catch (IOException | OpenSearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentIndexError(indexDocument.getFileId(), e);
}
}
@ -9,9 +9,9 @@ import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;

import io.micrometer.core.annotation.Timed;
import lombok.RequiredArgsConstructor;
@ -32,11 +32,22 @@ public class DocumentUpdateServiceImpl implements DocumentUpdateService {

try {
clientCache.getClient()
.update(u -> u.index(TenantContext.getTenantId()).id(fileId).doc(indexDocumentUpdate).refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())),
IndexDocumentUpdate.class);
.update(u -> u.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.id(fileId)
.doc(indexDocumentUpdate)
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())), IndexDocumentUpdate.class);
} catch (IOException | OpenSearchException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.documentUpdateError(fileId, e);
}
}


@SneakyThrows
@Timed("redactmanager_documentExists")
public boolean documentExists(String fileId) {

return clientCache.getClient().exists(e -> e.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix())).id(fileId)).value();
}

}
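Illustration (not part of the change set): the update and exists calls above follow the same pattern, derive the index name from the tenant's index prefix, then apply the configured refresh policy. Below is a minimal hedged sketch against the OpenSearch Java client, assuming a client instance, a file id and an IndexDocumentUpdate partial document; the helper name is hypothetical.

    import java.io.IOException;
    import org.opensearch.client.opensearch.OpenSearchClient;
    import org.opensearch.client.opensearch._types.Refresh;

    // Hypothetical helper; names mirror the service code but are not part of the diff.
    static boolean updateAndCheck(OpenSearchClient client, String indexPrefix,
                                  String fileId, IndexDocumentUpdate update) throws IOException {
        String index = indexPrefix + "_search"; // same naming rule as IndexNameHelper
        client.update(u -> u.index(index)
                .id(fileId)
                .doc(update)
                .refresh(Refresh.WaitFor), // "wait_for" makes the change visible to the next search
            IndexDocumentUpdate.class);
        return client.exists(e -> e.index(index).id(fileId)).value();
    }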
@ -12,8 +12,8 @@ import org.springframework.core.io.ResourceLoader;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;

import jakarta.json.stream.JsonParser;
import lombok.RequiredArgsConstructor;
@ -24,6 +24,7 @@ import lombok.extern.slf4j.Slf4j;
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
@SuppressWarnings("PMD")
public class IndexCreatorServiceImpl {

private final ElasticsearchSettings settings;
@ -34,7 +35,10 @@ public class IndexCreatorServiceImpl {
if (!indexExists(client)) {

try {
var response = client.indices().create(i -> i.index(TenantContext.getTenantId()).settings(createIndexSettings(client)).mappings(createIndexMapping(client)));
var response = client.indices()
.create(i -> i.index(IndexNameHelper.getSearchIndex(client.getSearchConnection().getIndexPrefix()))
.settings(createIndexSettings(client))
.mappings(createIndexMapping(client)));
log.info("Successfully created index: {}", response.index());
} catch (IOException e) {
log.error("Failed to create index.", e);
@ -46,7 +50,7 @@ public class IndexCreatorServiceImpl {
private boolean indexExists(OpensearchClient client) {

try {
var response = client.indices().exists(i -> i.index(TenantContext.getTenantId()));
var response = client.indices().exists(i -> i.index(IndexNameHelper.getSearchIndex(client.getSearchConnection().getIndexPrefix())));
return response.value();
} catch (IOException e) {
throw IndexException.indexExists(e);
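Illustration (not part of the change set): the creator service only issues a create when the exists check fails, so repeated start-ups for the same tenant are idempotent. A minimal sketch of that flow, assuming a connected client and an already-derived index name; settings and mappings are omitted here because the service loads them from bundled JSON resources.

    import java.io.IOException;
    import org.opensearch.client.opensearch.OpenSearchClient;

    // Hypothetical sketch of the create-if-missing flow shown above.
    static void ensureIndex(OpenSearchClient client, String index) throws IOException {
        boolean exists = client.indices().exists(e -> e.index(index)).value();
        if (!exists) {
            client.indices().create(c -> c.index(index));
        }
    }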
@ -4,8 +4,9 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;

import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
@ -29,10 +30,44 @@ public class IndexDeleteServiceImpl implements IndexDeleteService {
}


@Override
public void recreateIndex(SearchConnection searchConnection) {

var client = new OpensearchClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
indexCreatorService.createIndex(client);

}


@SneakyThrows
public void closeIndex() {

var closeIndexResponse = clientCache.getClient().indices().close(i -> i.index(TenantContext.getTenantId()).timeout(t -> t.time("2m")));
closeIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}


@SneakyThrows
public void dropIndex() {

dropIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
}


public void dropIndex(SearchConnection searchConnection) {

var client = new OpensearchClient(searchConnection);
closeIndex(client, searchConnection.getIndexPrefix());
dropIndex(client, searchConnection.getIndexPrefix());
}


@SneakyThrows
private void closeIndex(OpensearchClient opensearchClient, String indexPrefix) {

var closeIndexResponse = opensearchClient.indices()
.close(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
if (closeIndexResponse.acknowledged()) {
log.info("Index is closed");
} else {
@ -42,10 +77,10 @@ public class IndexDeleteServiceImpl implements IndexDeleteService {


@SneakyThrows
public void dropIndex() {
private void dropIndex(OpensearchClient opensearchClient, String indexPrefix) {

log.info("Will drop index");
var deleteIndexResponse = clientCache.getClient().indices().delete(i -> i.index(TenantContext.getTenantId()).timeout(t -> t.time("2m")));
var deleteIndexResponse = opensearchClient.indices().delete(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));

if (deleteIndexResponse.acknowledged()) {
log.info("Index is dropped");
@ -0,0 +1,56 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.util.Optional;

import org.opensearch.client.opensearch._types.OpenSearchException;
import org.opensearch.client.opensearch.indices.GetIndicesSettingsResponse;
import org.opensearch.client.opensearch.indices.IndexState;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;

import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.knecon.fforesight.tenantcommons.model.SearchConnection;

import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;

@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
@SuppressWarnings("PMD")
public class IndexQueryServiceImpl implements IndexQueryService {

@SneakyThrows
public IndexQueryResult getIndexQueryResult(SearchConnection searchConnection) {

IndexQueryResult.IndexQueryResultBuilder builder = IndexQueryResult.builder();

Optional<IndexState> optionalIndexState = getIndexState(searchConnection);
if (optionalIndexState.isPresent()) {
builder.indexFound(true);
var indexSettings = optionalIndexState.get().settings();
if (indexSettings != null) {
builder.numberOfReplicas(indexSettings.numberOfReplicas()).numberOfShards(indexSettings.numberOfShards());
}

}
return builder.build();
}

@SneakyThrows
private Optional<IndexState> getIndexState(SearchConnection searchConnection) {

var opensearchClient = new OpensearchClient(searchConnection);
var indexName = IndexNameHelper.getSearchIndex(opensearchClient.getSearchConnection().getIndexPrefix());
try {
GetIndicesSettingsResponse settings = opensearchClient.indices().getSettings(i -> i.index(indexName));
return Optional.ofNullable(settings.get(indexName));
} catch (OpenSearchException openSearchException) {
return Optional.empty();
}
}
}
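Illustration (not part of the change set): the query service reads the index settings back and maps them into IndexQueryResult, treating a missing index as an empty result rather than an error. A reduced, hedged sketch of that lookup, assuming a connected client:

    import java.io.IOException;
    import java.util.Optional;
    import org.opensearch.client.opensearch.OpenSearchClient;
    import org.opensearch.client.opensearch._types.OpenSearchException;
    import org.opensearch.client.opensearch.indices.IndexState;

    // Hypothetical sketch mirroring getIndexState/getIndexQueryResult above.
    static Optional<String> replicasFor(OpenSearchClient client, String index) throws IOException {
        try {
            IndexState state = client.indices().getSettings(b -> b.index(index)).get(index);
            return Optional.ofNullable(state)
                .map(IndexState::settings)
                .map(st -> st.numberOfReplicas());
        } catch (OpenSearchException e) {
            return Optional.empty(); // index does not exist
        }
    }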
@ -1,30 +1,25 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.util.stream.Collectors;

import javax.annotation.PreDestroy;

import com.knecon.fforesight.tenantcommons.model.SearchConnection;
import lombok.Data;
import lombok.SneakyThrows;
import lombok.experimental.Delegate;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestClientBuilder;
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.transport.rest_client.RestClientTransport;

import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;

import lombok.Data;
import lombok.experimental.Delegate;

@Data
@SuppressWarnings("PMD")
public class OpensearchClient {

// Lower timeouts should be set per request.
private static final int ABSURD_HIGH_TIMEOUT = 90_000_000;
private static final int ABSURD_HIGH_TIMEOUT = 600_000;

private SearchConnection searchConnection;

@ -37,11 +32,13 @@ public class OpensearchClient {
HttpHost[] httpHost = searchConnection.getHosts()
.stream()
.map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
.collect(Collectors.toList())
.toList()
.toArray(new HttpHost[searchConnection.getHosts().size()]);

RestClientBuilder builder = RestClient.builder(httpHost)
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT).setSocketTimeout(ABSURD_HIGH_TIMEOUT));
var builder = RestClient.builder(httpHost)
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(0)
.setConnectionRequestTimeout(ABSURD_HIGH_TIMEOUT)
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));

if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
@ -56,10 +53,10 @@ public class OpensearchClient {
}


@PreDestroy
public void onShutdown() {
@SneakyThrows
public void terminate() {

client.shutdown();
client._transport().close();
}

}
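Illustration (not part of the change set): behind the @Delegate, OpensearchClient wires an Apache HttpClient-based low-level RestClient into the opensearch-java transport, now with no hard connect timeout and a 600 000 ms socket timeout. A minimal hedged sketch of that wiring, assuming a single host and optional basic auth; the factory name is hypothetical.

    import org.apache.http.HttpHost;
    import org.apache.http.auth.AuthScope;
    import org.apache.http.auth.UsernamePasswordCredentials;
    import org.apache.http.impl.client.BasicCredentialsProvider;
    import org.opensearch.client.RestClient;
    import org.opensearch.client.json.jackson.JacksonJsonpMapper;
    import org.opensearch.client.opensearch.OpenSearchClient;
    import org.opensearch.client.transport.rest_client.RestClientTransport;

    // Hypothetical factory; the timeout values mirror ABSURD_HIGH_TIMEOUT above.
    static OpenSearchClient connect(String host, int port, String scheme, String user, String password) {
        var credentials = new BasicCredentialsProvider();
        if (user != null && !user.isEmpty()) {
            credentials.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(user, password));
        }
        RestClient restClient = RestClient.builder(new HttpHost(host, port, scheme))
            .setRequestConfigCallback(rc -> rc.setConnectTimeout(0)
                .setConnectionRequestTimeout(600_000)
                .setSocketTimeout(600_000))
            .setHttpClientConfigCallback(hc -> hc.setDefaultCredentialsProvider(credentials))
            .build();
        return new OpenSearchClient(new RestClientTransport(restClient, new JacksonJsonpMapper()));
    }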
@ -1,9 +1,6 @@
package com.iqser.red.service.search.v1.server.service.opensearch;

import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import javax.annotation.PostConstruct;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
@ -13,11 +10,11 @@ import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.cache.RemovalListener;
import com.iqser.red.service.search.v1.server.client.TenantsClient;
import com.iqser.red.service.search.v1.server.model.Connection;
import com.iqser.red.service.search.v1.server.multitenancy.EncryptionDecryptionService;
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
import com.knecon.fforesight.tenantcommons.EncryptionDecryptionService;
import com.knecon.fforesight.tenantcommons.TenantContext;
import com.knecon.fforesight.tenantcommons.TenantsClient;

import jakarta.annotation.PostConstruct;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
@ -38,37 +35,56 @@ public class OpensearchClientCache {
@Value("${multitenancy.client-cache.expireAfterAccess:10}")
private Integer expireAfterAccess;

private LoadingCache<String, Connection> connections;
private LoadingCache<Connection, OpensearchClient> clients;
private LoadingCache<String, OpensearchClient> clients;


@SneakyThrows
public void isClientAliveOrTerminate() {

try {
var client = clients.get(TenantContext.getTenantId());
try {

log.info("Checking if client is still alive: {}", client.info());
} catch (Exception e) {

try {
client.terminate();
} catch (Exception e2) {

log.info("Failed to terminate ES Client");
clients.invalidate(TenantContext.getTenantId());
}
}
}catch (Exception e){
log.error("Failed to terminate/invalide client", e);
}
}

@PostConstruct
protected void createCache() {

connections = CacheBuilder.newBuilder().maximumSize(maximumSize).expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES).build(new CacheLoader<>() {
public Connection load(String tenantId) {

var tenant = tenantsClient.getTenant(tenantId);
var hostsAsString = tenant.getSearchConnection().getHosts().stream().collect(Collectors.joining());
return Connection.builder().hosts(hostsAsString).searchConnection(tenant.getSearchConnection()).build();
}
});

clients = CacheBuilder.newBuilder()
.maximumSize(maximumSize)
.expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
.removalListener((RemovalListener<Connection, OpensearchClient>) removal -> {
removal.getValue().shutdown();
log.info("Closed opensearch client for tenant {}", removal.getKey().getHosts());
.removalListener((RemovalListener<String, OpensearchClient>) removal -> {
try {
removal.getValue().terminate();
log.info("Closed elasticsearch client for tenant {}", removal.getKey());
} catch (Exception e) {
log.info("Failed to close elasticsearch client for tenant {}", removal.getKey());
}
})
.build(new CacheLoader<>() {
public OpensearchClient load(Connection key) {
public OpensearchClient load(String tenantId) {

if (key.getSearchConnection().getPassword() != null) {
key.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(key.getSearchConnection().getPassword()));
var tenant = tenantsClient.getTenant(tenantId);

if (tenant.getSearchConnection().getPassword() != null) {
tenant.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(tenant.getSearchConnection().getPassword()));
}
var client = new OpensearchClient(key.getSearchConnection());
log.info("Initialized opensearch client for tenant {}", key);
var client = new OpensearchClient(tenant.getSearchConnection());
log.info("Initialized elasticsearch client for tenant {}", tenantId);
indexCreatorService.createIndex(client);
return client;
}
@ -79,8 +95,7 @@ public class OpensearchClientCache {
@SneakyThrows
public OpensearchClient getClient() {

var connection = connections.get(TenantContext.getTenantId());
return clients.get(connection);
return clients.get(TenantContext.getTenantId());
}

}
}
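Illustration (not part of the change set): with the intermediate Connection cache removed, the cache is keyed directly by tenant id and each evicted client is terminated on removal. A reduced, hedged sketch of that shape with Guava, reusing the OpensearchClient type from above and a hypothetical per-tenant factory in place of the real loader logic.

    import java.util.concurrent.TimeUnit;
    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.CacheLoader;
    import com.google.common.cache.LoadingCache;
    import com.google.common.cache.RemovalListener;

    LoadingCache<String, OpensearchClient> clients = CacheBuilder.newBuilder()
        .maximumSize(100)                        // stand-in for the configured maximumSize
        .expireAfterAccess(10, TimeUnit.MINUTES) // stand-in for the configured expireAfterAccess
        .removalListener((RemovalListener<String, OpensearchClient>) removal -> removal.getValue().terminate())
        .build(new CacheLoader<>() {
            @Override
            public OpensearchClient load(String tenantId) {
                // The real loader resolves the tenant's SearchConnection and decrypts its password first.
                return createClientFor(tenantId); // hypothetical factory
            }
        });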
@ -33,6 +33,7 @@ import com.iqser.red.service.search.v1.server.exception.IndexException;
import com.iqser.red.service.search.v1.server.model.IndexDocument;
import com.iqser.red.service.search.v1.server.model.Query;
import com.iqser.red.service.search.v1.server.service.SearchService;
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;

import io.micrometer.core.annotation.Timed;
@ -71,7 +72,8 @@ public class SearchServiceImpl implements SearchService {
highlightFieldMap.put("filename", new HighlightField.Builder().build());
highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());

SearchRequest request = new SearchRequest.Builder().query(convertQuery(query,
SearchRequest request = new SearchRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
.query(convertQuery(query,
dossierTemplateIds,
dossierIds,
fileId,
@ -108,6 +110,7 @@ public class SearchServiceImpl implements SearchService {
try {
return clientCache.getClient().search(searchRequest, IndexDocument.class);
} catch (IOException e) {
clientCache.isClientAliveOrTerminate();
throw IndexException.searchFailed(e);
}
}
@ -327,7 +330,7 @@ public class SearchServiceImpl implements SearchService {

return MatchedSection.builder()
.headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
.sectionNumber(indexSection.getInt("sectionNumber"))
.sectionNumber(indexSection.getString("sectionNumber"))
.pages(pages)
.matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
.build();
@ -0,0 +1,16 @@
package com.iqser.red.service.search.v1.server.utils;

import lombok.experimental.UtilityClass;

@UtilityClass
public class IndexNameHelper {

private static final String SEARCH_INDEX = "%s_search";


public String getSearchIndex(String indexPrefix) {

return String.format(SEARCH_INDEX, indexPrefix);
}

}
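Illustration (not part of the change set): the helper centralises the naming convention used throughout the services above, the tenant's index prefix plus a fixed "_search" suffix.

    String index = IndexNameHelper.getSearchIndex("acme"); // -> "acme_search"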
@ -0,0 +1,5 @@
server:
  port: 8099

persistence-service.url: "http://localhost:8085"
tenant-user-management-service.url: "http://localhost:8091/tenant-user-management/internal"
@ -2,11 +2,28 @@ info:
  description: Search Service Server V1

persistence-service.url: "http://persistence-service-v1:8080"
tenant-user-management-service.url: "http://tenant-user-management-service:8080/internal"

fforesight:
  tenants.remote: true
  tenant-exchange.name: 'tenants-exchange'

logging.pattern.level: "%5p [${spring.application.name},%X{traceId:-},%X{spanId:-}]"

logging.type: ${LOGGING_TYPE:CONSOLE}
kubernetes.namespace: ${NAMESPACE:default}
project.version: 1.0-SNAPSHOT


server:
  port: 8080

lifecycle:
  base-package: com.iqser.red.service.search

spring:
  application:
    name: search-service
  main:
    allow-circular-references: true # FIXME
  profiles:
@ -33,9 +50,11 @@ management:
  health.enabled: true
  endpoints.web.exposure.include: prometheus, health
  metrics.export.prometheus.enabled: ${monitoring.enabled:false}
  tracing:
    enabled: ${TRACING_ENABLED:false}
    sampling:
      probability: ${TRACING_PROBABILITY:1.0}
  otlp:
    tracing:
      endpoint: ${OTLP_ENDPOINT:http://otel-collector-opentelemetry-collector.otel-collector:4318/v1/traces}

search:
  backend: elasticsearch

storage:
  backend: 's3'
@ -0,0 +1,17 @@
<configuration>

    <springProperty scope="configuration" name="logType" source="logging.type"/>
    <springProperty scope="context" name="application.name" source="spring.application.name"/>
    <springProperty scope="context" name="version" source="project.version"/>
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <include resource="org/springframework/boot/logging/logback/console-appender.xml"/>

    <appender name="JSON" class="ch.qos.logback.core.ConsoleAppender">
        <encoder class="net.logstash.logback.encoder.LogstashEncoder"/>
    </appender>

    <root level="INFO">
        <appender-ref ref="${logType}"/>
    </root>

</configuration>
@ -6,9 +6,12 @@ import java.util.Set;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.springframework.amqp.rabbit.core.RabbitAdmin;
|
||||
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
|
||||
import org.springframework.boot.test.autoconfigure.actuate.observability.AutoConfigureObservability;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
import org.springframework.cloud.openfeign.EnableFeignClients;
|
||||
@ -17,6 +20,7 @@ import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.FilterType;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
@ -24,13 +28,15 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.testcontainers.elasticsearch.ElasticsearchContainer;
|
||||
import org.testcontainers.utility.DockerImageName;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.TenantResponse;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.server.Application;
|
||||
import com.iqser.red.service.search.v1.server.client.TenantsClient;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.storage.commons.StorageAutoConfiguration;
|
||||
import com.iqser.red.storage.commons.service.StorageService;
|
||||
import com.iqser.red.storage.commons.utils.FileSystemBackedStorageService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.TenantsClient;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
|
||||
@ComponentScan
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ -38,6 +44,8 @@ import com.iqser.red.storage.commons.service.StorageService;
|
||||
@ContextConfiguration(initializers = {AbstractElasticsearchIntegrationTest.Initializer.class})
|
||||
@EnableFeignClients(basePackageClasses = AbstractElasticsearchIntegrationTest.TestConfiguration.class)
|
||||
@DirtiesContext
|
||||
@AutoConfigureObservability
|
||||
@SuppressWarnings("PMD")
|
||||
public abstract class AbstractElasticsearchIntegrationTest {
|
||||
|
||||
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=wait_for";
|
||||
@ -46,6 +54,12 @@ public abstract class AbstractElasticsearchIntegrationTest {
|
||||
@MockBean
|
||||
private TenantsClient tenantsClient;
|
||||
|
||||
@MockBean
|
||||
private RabbitAdmin rabbitAdmin;
|
||||
|
||||
@MockBean
|
||||
private RabbitListenerEndpointRegistry rabbitListenerEndpointRegistry;
|
||||
|
||||
private static int port;
|
||||
|
||||
|
||||
@ -54,8 +68,15 @@ public abstract class AbstractElasticsearchIntegrationTest {
|
||||
|
||||
TenantContext.setTenantId("redaction");
|
||||
when(tenantsClient.getTenant("redaction")).thenReturn(TenantResponse.builder()
|
||||
.searchConnection(SearchConnection.builder().hosts(Set.of("localhost")).port(port).scheme("http").numberOfShards("1").numberOfReplicas("5").build())
|
||||
.build());
|
||||
.searchConnection(SearchConnection.builder()
|
||||
.hosts(Set.of("localhost"))
|
||||
.port(port)
|
||||
.scheme("http")
|
||||
.numberOfShards("1")
|
||||
.numberOfReplicas("5")
|
||||
.indexPrefix("indexprefix")
|
||||
.build())
|
||||
.build());
|
||||
}
|
||||
|
||||
|
||||
@ -63,7 +84,7 @@ public abstract class AbstractElasticsearchIntegrationTest {
|
||||
|
||||
public void initialize(ConfigurableApplicationContext configurableApplicationContext) {
|
||||
|
||||
var esContainer = new ElasticsearchContainer(DockerImageName.parse("elasticsearch:8.6.0").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch"));
|
||||
var esContainer = new ElasticsearchContainer(DockerImageName.parse("elasticsearch:8.6.2").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch"));
|
||||
esContainer.getEnvMap().put("xpack.security.enabled", "false");
|
||||
esContainer.start();
|
||||
|
||||
@ -77,14 +98,15 @@ public abstract class AbstractElasticsearchIntegrationTest {
|
||||
protected StorageService storageService;
|
||||
|
||||
@Configuration
|
||||
@EnableAutoConfiguration(exclude = {StorageAutoConfiguration.class, RabbitAutoConfiguration.class})
|
||||
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
|
||||
@ComponentScan(excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = StorageAutoConfiguration.class)})
|
||||
public static class TestConfiguration {
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public StorageService inmemoryStorage() {
|
||||
public StorageService inmemoryStorage(ObjectMapper objectMapper) {
|
||||
|
||||
return new FileSystemBackedStorageService();
|
||||
return new FileSystemBackedStorageService(objectMapper);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -18,19 +18,22 @@ import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.FilterType;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.testcontainers.utility.DockerImageName;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.SearchConnection;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.multitenancy.TenantResponse;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.server.Application;
|
||||
import com.iqser.red.service.search.v1.server.client.TenantsClient;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.storage.commons.StorageAutoConfiguration;
|
||||
import com.iqser.red.storage.commons.service.StorageService;
|
||||
import com.iqser.red.storage.commons.utils.FileSystemBackedStorageService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.TenantsClient;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
|
||||
@ComponentScan
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@ -38,6 +41,7 @@ import com.iqser.red.storage.commons.service.StorageService;
|
||||
@ContextConfiguration(initializers = {AbstractOpensearchIntegrationTest.Initializer.class})
|
||||
@EnableFeignClients(basePackageClasses = AbstractOpensearchIntegrationTest.TestConfiguration.class)
|
||||
@DirtiesContext
|
||||
@SuppressWarnings("PMD")
|
||||
public abstract class AbstractOpensearchIntegrationTest {
|
||||
|
||||
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=wait_for";
|
||||
@ -54,7 +58,14 @@ public abstract class AbstractOpensearchIntegrationTest {
|
||||
|
||||
TenantContext.setTenantId("redaction");
|
||||
when(tenantsClient.getTenant("redaction")).thenReturn(TenantResponse.builder()
|
||||
.searchConnection(SearchConnection.builder().hosts(Set.of("localhost")).port(port).scheme("http").numberOfShards("1").numberOfReplicas("5").build())
|
||||
.searchConnection(SearchConnection.builder()
|
||||
.hosts(Set.of("localhost"))
|
||||
.port(port)
|
||||
.scheme("http")
|
||||
.numberOfShards("1")
|
||||
.numberOfReplicas("5")
|
||||
.indexPrefix("indexprefix")
|
||||
.build())
|
||||
.build());
|
||||
}
|
||||
|
||||
@ -77,14 +88,15 @@ public abstract class AbstractOpensearchIntegrationTest {
|
||||
protected StorageService storageService;
|
||||
|
||||
@Configuration
|
||||
@EnableAutoConfiguration(exclude = {StorageAutoConfiguration.class, RabbitAutoConfiguration.class})
|
||||
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
|
||||
@ComponentScan(excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = StorageAutoConfiguration.class)})
|
||||
public static class TestConfiguration {
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public StorageService inmemoryStorage() {
|
||||
public StorageService inmemoryStorage(ObjectMapper objectMapper) {
|
||||
|
||||
return new FileSystemBackedStorageService();
|
||||
return new FileSystemBackedStorageService(objectMapper);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -7,11 +7,13 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.amqp.rabbit.core.RabbitAdmin;
|
||||
import org.springframework.amqp.rabbit.core.RabbitTemplate;
|
||||
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.testcontainers.shaded.org.apache.commons.lang.StringUtils;
|
||||
import org.testcontainers.shaded.org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
|
||||
|
||||
@ -1,121 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.springframework.core.io.InputStreamResource;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.commons.jackson.ObjectMapperFactory;
|
||||
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
|
||||
import com.iqser.red.storage.commons.service.StorageService;
|
||||
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
public class FileSystemBackedStorageService implements StorageService {
|
||||
|
||||
private final Map<String, File> dataMap = new HashMap<>();
|
||||
|
||||
|
||||
public FileSystemBackedStorageService() {
|
||||
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public InputStreamResource getObject(String tenantId, String objectId) {
|
||||
|
||||
var res = dataMap.get(objectId);
|
||||
if (res == null) {
|
||||
throw new StorageObjectDoesNotExist(new RuntimeException());
|
||||
}
|
||||
return new InputStreamResource(new FileInputStream(res));
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void deleteObject(String tenantId, String objectId) {
|
||||
|
||||
dataMap.remove(objectId);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public boolean objectExists(String tenantId, String objectId) {
|
||||
|
||||
return dataMap.containsKey(objectId);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
@SneakyThrows
|
||||
public <T> void storeJSONObject(String tenantId, String objectId, T any) {
|
||||
|
||||
File tempFile = File.createTempFile("test", ".tmp");
|
||||
getMapper().writeValue(new FileOutputStream(tempFile), any);
|
||||
dataMap.put(objectId, tempFile);
|
||||
}
|
||||
|
||||
|
||||
private ObjectMapper getMapper() {
|
||||
|
||||
return ObjectMapperFactory.create();
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
@SneakyThrows
|
||||
public <T> T readJSONObject(String tenantId, String objectId, Class<T> clazz) {
|
||||
|
||||
if (dataMap.get(objectId) == null || !dataMap.get(objectId).exists()) {
|
||||
throw new StorageObjectDoesNotExist("Stored object not found");
|
||||
}
|
||||
return getMapper().readValue(new FileInputStream(dataMap.get(objectId)), clazz);
|
||||
}
|
||||
|
||||
|
||||
public List<String> listPaths() {
|
||||
|
||||
return new ArrayList<>(dataMap.keySet());
|
||||
}
|
||||
|
||||
|
||||
public List<String> listFilePaths() {
|
||||
|
||||
return dataMap.values().stream().map(File::getAbsolutePath).collect(Collectors.toList());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
@SneakyThrows
|
||||
public void storeObject(String tenantId, String objectId, InputStream stream) {
|
||||
|
||||
File tempFile = File.createTempFile("test", ".tmp");
|
||||
|
||||
try (var fileOutputStream = new FileOutputStream(tempFile)) {
|
||||
IOUtils.copy(stream, fileOutputStream);
|
||||
}
|
||||
|
||||
dataMap.put(objectId, tempFile);
|
||||
}
|
||||
|
||||
|
||||
public void clearStorage() {
|
||||
|
||||
this.dataMap.forEach((k, v) -> {
|
||||
v.delete();
|
||||
});
|
||||
this.dataMap.clear();
|
||||
}
|
||||
|
||||
}
|
||||
@ -3,7 +3,9 @@ package com.iqser.red.service.search.v1.server.service;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.amqp.rabbit.core.RabbitAdmin;
|
||||
import org.springframework.amqp.rabbit.core.RabbitTemplate;
|
||||
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
@ -22,9 +24,9 @@ import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateC
|
||||
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
|
||||
import com.iqser.red.service.search.v1.server.controller.SearchController;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.service.search.v1.server.multitenancy.TenantContext;
|
||||
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
|
||||
import com.iqser.red.service.search.v1.server.service.utils.MetricValidationUtils;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
|
||||
import io.micrometer.prometheus.PrometheusMeterRegistry;
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
@ -7,11 +7,13 @@ import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.amqp.rabbit.core.RabbitAdmin;
|
||||
import org.springframework.amqp.rabbit.core.RabbitTemplate;
|
||||
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.testcontainers.shaded.org.apache.commons.lang.StringUtils;
|
||||
import org.testcontainers.shaded.org.apache.commons.lang3.StringUtils;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
|
||||
@ -54,6 +56,12 @@ public class OpensearchTest extends AbstractOpensearchIntegrationTest {
|
||||
@MockBean
|
||||
private RabbitTemplate rabbitTemplate;
|
||||
|
||||
@MockBean
|
||||
private RabbitAdmin rabbitAdmin;
|
||||
|
||||
@MockBean
|
||||
private RabbitListenerEndpointRegistry rabbitListenerEndpointRegistry;
|
||||
|
||||
@MockBean
|
||||
private IndexDeleteService indexDeleteService;
|
||||
|
||||
|
||||
@ -1,9 +1,17 @@
|
||||
|
||||
ribbon:
|
||||
ConnectTimeout: 600000
|
||||
ReadTimeout: 600000
|
||||
|
||||
logging.type: ${LOGGING_TYPE:CONSOLE}
|
||||
|
||||
logging.level.root: INFO
|
||||
|
||||
|
||||
fforesight:
|
||||
tenant-exchange:
|
||||
name: 'tenants-exchange'
|
||||
|
||||
spring:
|
||||
main:
|
||||
allow-bean-definition-overriding: true
|
||||
@ -21,3 +29,8 @@ management:
|
||||
metrics.export.prometheus.enabled: true
|
||||
|
||||
persistence-service.url: 'http://mock.url'
|
||||
|
||||
server:
|
||||
port: 19547
|
||||
|
||||
POD_NAME: search-service
|
||||
@ -2,204 +2,204 @@
|
||||
"numberOfPages": 9,
|
||||
"sectionTexts": [
|
||||
{
|
||||
"sectionNumber": 1,
|
||||
"sectionNumber": "1",
|
||||
"text": "Rule 0: Expand CBI Authors with firstname initials F. Lastname, J. Doe, M. Mustermann Lastname M., Doe J. Mustermann M."
|
||||
},
|
||||
{
|
||||
"sectionNumber": 2,
|
||||
"sectionNumber": "2",
|
||||
"text": "Rule 1/2: Redact CBI Authors based on Dict Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No David Ksenia Max Mustermann Ranya Eikenboom Charalampos Schenk Tanja Schmitt ← should not be annotated, not in Dictionary"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 3,
|
||||
"sectionNumber": "3",
|
||||
"text": "Rule 3/4: Redact (not) CBI Add/ress based on Dict Dont Redact (mark as skipped) when Vertebrate Study is No Redact when Vertebrate Study is Yes Warnsveld, 7232 CX Warnsveld, Netherlands, NL Institut Industries, 33 Rue Jean Baffier, 18000 Bourges, France, FR 4-6 Chem. des Varennes, 18300 Saint-Satur, France, FR Lesdo Industries, Chäppelisträssli, 6078 Lungern, Switzerland Shlissel'burgskaya Ulitsa, Nizhny Novgorod Oblast, Russia, 603034, RU Karl Johans Gate 11, 0154 Oslo, Norway, NOR ← should not be annotated, not in Dictionary"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 4,
|
||||
"sectionNumber": "4",
|
||||
"text": "Rule 5: Do not redact genitive CBI_authors (Entries based on Dict) Expand to Hint Clarissa’s Donut ← not added to Dict, should be not annotated Simpson's Tower ← added to Authors-Dict, should be annotated"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 5,
|
||||
"sectionNumber": "5",
|
||||
"text": "Reference No Author(s) Year Title Laboratory"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 6,
|
||||
"sectionNumber": "6",
|
||||
"text": "BR2 /2 Michael N. 1998 The role of physical education in the school system. Weyland Industries"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 7,
|
||||
"sectionNumber": "7",
|
||||
"text": "BR3 /5 Funnarie B. 2001 It should be illegal to produce and sell tobacco Authentic Diagnostics"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 8,
|
||||
"sectionNumber": "8",
|
||||
"text": "ZZ/12 Feuer A. 1989 Social media is the real cause of teenage depression. Tyrell Corporation"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 10,
|
||||
"sectionNumber": "10",
|
||||
"text": "Rule 6-11 (Authors Table) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 11,
|
||||
"sectionNumber": "11",
|
||||
"text": "Rule 12/13: Redact/Hint if CTL or BL was found Redact when Vertebrate Study is Yes Hint when Vertebrate Study is No CTL/without dictionary entry CTL without Slash BL/without dictionary entry BL without Slash CTL/with dictionary entry 1234 with Slash CTL with dictionary entry 5678 without Slash BL/with dictionary entry 1234 with Slash BL with dictionary entry 5678 without Slash"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 12,
|
||||
"sectionNumber": "12",
|
||||
"text": "Rule 14/15: Redact and add recommendation for et al. Redact Term “Desiree”, “Melanie” and add to Recommendation CBI Authors if Vertebrate Study is Yes & No Lorem ipsum dolor sit amet, consectetur adipiscing elit Desiree et al sed do eiusmod tempor incididunt ut labore et dolore magna aliqua Melanie et al. Reference No 12345 Lorem ipsum."
|
||||
},
|
||||
{
|
||||
"sectionNumber": 13,
|
||||
"sectionNumber": "13",
|
||||
"text": "Rule 16/17: Add recommendation for Addresses in Test Organism/Animals sections Recommend only if Vertebrate Study is Yes, else do nothing Lorem ipsum dolor sit Species: Mouse; Source: Stark Industries"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 14,
|
||||
"sectionNumber": "14",
|
||||
"text": "Rule 16/17 (additional) negative Test; missing first Key Nothing should happen because of missing first/second keyword according to the rules Dont redact here because of missing first key; Source: Omni Consumer Products Dont redact here because missing first keyword; Source Resources Development Administration"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 15,
|
||||
"sectionNumber": "15",
|
||||
"text": "Rule 16/17 (additional) negative Test; missing second Key Dont redact here because of missing second key; Species: Mouse; Omni Consumer Products Dont redact here because missing second keyword; Species: Mouse, Resources Development Administration"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 16,
|
||||
"sectionNumber": "16",
|
||||
"text": "Rule 18: Do not redact Names and Addresses if Published Information found Do not redact Names and Addresses if Published Information found Lorem ipsum dolor sit amet Oxford University Press in voluptate velit esse cillum. Iakovos Geiger, Julian Ritter, Asya Lyon, Carina Madsen, Alexandra Häusler, Hanke Mendel, Ranya Eikenboom. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Min Kwok, Jun K., Tu Wong, Qiang Suen, Zhou Mah, Ning Liu, Lei W. Huang, Ru X. Wu"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 17,
|
||||
"sectionNumber": "17",
|
||||
"text": "Rule 19/20: Redacted PII Personal Identification Information based on Dict Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Naka-27 Aomachi, Nomi, Ishikawa 923-1101, Japan, JP Sude Halide Nurullah Özgür U. Reyhan B. Rahim C. J. Alfred Xinyi Y. Tao Clara Siegfried ← not added to Dict, should be not annotated"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 18,
|
||||
"sectionNumber": "18",
|
||||
"text": "Rule 21/22: Redact Emails by RegEx Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Duis aute irure dolor in library@outlook.com reprehenderit in voluptate gordonjcp@msn.com velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint dinther@comcast.net occaecat cupidatat non proident, sunt in kawasaki@me.com culpa qui officia deserunt mollit anim id est laborum."
|
||||
},
|
||||
{
|
||||
"sectionNumber": 19,
|
||||
"sectionNumber": "19",
|
||||
"text": "Description Text Contact Point"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 20,
|
||||
"sectionNumber": "20",
|
||||
"text": "Duis aute irure dolor in reprehenderit in voluptate velit esse cillum Contact Point dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Contact point: Central Research Industry Phone: +49 2113 2311 563 Fax: +49 2113 2311 560 Tel.: +81 764770164 Tel: +81 6653 44563 E-mail: Seriknowmobil@co.uk Email: maximiliamschmitt@arcor.de e-mail: maximiliamschmitt@t-online.de E-mail address: example@mail.com Contact: Maximiliam Schmitt Telephone number: +27414328992 Telephone No: +274 1432 8991 Fax number: +274 1432 8990 Telephone: +274 34223331 Phone No. +274 1432 8933 Contact: 493 1223 4592 European contact: European Central Institute Alternative contact: Emilia Lockhart Alternative contact: Cyberdyne Systems Tower Defense 121a Hong Kong, BT District"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 22,
|
||||
"sectionNumber": "22",
|
||||
"text": "Rule 23/24: Redact contact information (contains \"Contact point:\") Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Contact-Information was found should be appears”"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 23,
|
||||
"sectionNumber": "23",
|
||||
"text": "Description Text Applicant"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 24,
|
||||
"sectionNumber": "24",
|
||||
"text": "Duis aute irure dolor in reprehenderit in voluptate velit esse cillum Contact Point dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. Contact point: Central Research Industry Phone: +49 2113 2311 563 Fax: +49 2113 2311 560 Tel.: +81 764770164 Tel: +81 6653 44563 E-mail: Seriknowmobil@co.uk Email: maximiliamschmitt@arcor.de e-mail: maximiliamschmitt@t-online.de E-mail address: example@mail.com Contact: Maximiliam Schmitt Telephone number: +27414328992 Telephone No: +274 1432 8991 Fax number: +274 1432 8990 Telephone: +274 34223331 Phone No. +274 1432 8933 Contact: 493 1223 4592 European contact: European Central Institute Alternative contact: Emilia Lockhart Alternative contact: Cyberdyne Systems Tower Defense 121a Hong Kong, BT District"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 26,
|
||||
"sectionNumber": "26",
|
||||
"text": "Rule 25/26: Redact contact information (contains \"Applicant\" as Headline or Text) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Applicant Information was found should be appears” Applicant Name: Soylent Corporation Contact point: Riddley Scott Address: 359-21 Huam-dong Yongsan-gu Seoul, South Korea Phone: +82 122 34188 Fax: +82 122 34180 E-mail: food-industry@korea.com Contact: This is a special case, everything between this and the next keyword should be redacted Tel.: +275 5678 1234 132 fsdfdfre frefref"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 27,
|
||||
"sectionNumber": "27",
|
||||
"text": "Rule 27/28: Redact contact Information (contains Producer) Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No “Producer was found” should be appears Producer of the plant production Name: Umbrella Corporation Contact: Jill Valentine Address: 359-21 Huam-dong Yongsan-gu Seoul, South Korea Phone: +82 122 34188 Fax: +82 122 34180 E-mail: pharma-industry@korea.com"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 28,
|
||||
"sectionNumber": "28",
|
||||
"text": "Rule 29/30/31/32: If Text contains \"AUTHORS:\" and \"COMPLETION DATES\" but not \"STUDY COMPLETION DATES\", then Redact between both Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Study Report___ AUTHOR(S): Dr. Alan Grant COMPLETION DATE: 02 December 1997"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 29,
|
||||
"sectionNumber": "29",
|
||||
"text": "Rule 29/30/31/32: (additional) negative Test for Study completion dates No Redaction should be appears here Study Report___ AUTHOR(S): Dr. Alan Grant STUDY COMPLETION DATE: 02 December 1997"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 30,
|
||||
"sectionNumber": "30",
|
||||
"text": "Rule 33/34: If Text contains \"Performing Lab\" and \"Lab Project ID\", then Redact everything between Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No Study Report___ PERFORMING LABORATORY: Umbrella Corporation LABORATORY PROJECT ID: Number 20201/33991/ERZAT/21"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 31,
|
||||
"sectionNumber": "31",
|
||||
"text": "Rule 35/36/37/38: ?? Tba"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 32,
|
||||
"sectionNumber": "32",
|
||||
"text": "Rule 39: Purity Hint Add Purity as Hint when Percent-Numbers is there Test Item: Soda Purity: 45% ← should be Hint Purity: <45% ← should be Hint Purity: >45% ← should be Hint Purity: 101% ← should ne be Hint because >100 % is not possible Purity: =>45% ← should be not Hint because additional symbols Purity: =<45% ← should be not Hint because additional symbols Purity: aa 45% ← should be not Hint because additional symbols Purity: 45% aa ← should be not Hint because additional symbols Purity: aa45% ← should be not Hint because additional symbols Purity: 45%aa ← should be not Hint because additional symbols Product-Code: EAK-L443 purity: 99% ← not Hint because case sensitive purity: >99% ← not Hint because case sensitive purity: <99% ← not Hint because case sensitive Supplier: GreenForce"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 33,
|
||||
"sectionNumber": "33",
|
||||
"text": "Rule 40: Ignore Dossier-Redaction if Confidentiality is not set Dont redact Dossier-Redaction if Confidentiality is not set in file attributes Excepteur sint occaecat cupidatat non proident, myDossierRedaction sunt in culpa qui officia deserunt mollit anim id est laborum."
|
||||
},
|
||||
{
|
||||
"sectionNumber": 34,
|
||||
"sectionNumber": "34",
|
||||
"text": "Rule 41/42: Redact Signatures Redact when Vertebrate Study is Yes Redact when Vertebrate Study is No __________________________ __________________________ Signed by: Dilara Sonnenschein Signed by: Tobias Müller"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 35,
|
||||
"sectionNumber": "35.1.1.3",
|
||||
"text": "Rule 43: Redact Logo Redact Logo only if Vertebrate Study is Yes, else do nothing (skipped)"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 36,
|
||||
"sectionNumber": "36",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 37,
|
||||
"sectionNumber": "37",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 38,
|
||||
"sectionNumber": "38",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 39,
|
||||
"sectionNumber": "39",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 40,
|
||||
"sectionNumber": "40",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 41,
|
||||
"sectionNumber": "41",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 42,
|
||||
"sectionNumber": "42",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 43,
|
||||
"sectionNumber": "43",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 44,
|
||||
"sectionNumber": "44",
|
||||
"text": "This is a Page-Header"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 45,
|
||||
"sectionNumber": "45",
|
||||
"text": "This is a Page-Footer"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 46,
|
||||
"sectionNumber": "46",
|
||||
"text": "This is a Page-Footer"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 47,
|
||||
"sectionNumber": "47",
|
||||
"text": "This is a Page-Footer"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 48,
|
||||
"sectionNumber": "48",
|
||||
"text": "This is a Page-Footer"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 49,
|
||||
"sectionNumber": "49",
|
||||
"text": "This is a Page-Footer"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 50,
|
||||
"sectionNumber": "50",
|
||||
"text": "This is a Page-Footer"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 51,
|
||||
"sectionNumber": "51",
|
||||
"text": "This is a Page-Footer"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 52,
|
||||
"sectionNumber": "52",
|
||||
"text": "This is a Page-Footer"
|
||||
},
|
||||
{
|
||||
"sectionNumber": 53,
|
||||
"sectionNumber": "53",
|
||||
"text": "This is a Page-Footer"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
5
settings.gradle.kts
Normal file
@ -0,0 +1,5 @@
rootProject.name = "search-service"
include(":search-service-api-v1")
include(":search-service-server-v1")
project(":search-service-api-v1").projectDir = file("search-service-v1/search-service-api-v1")
project(":search-service-server-v1").projectDir = file("search-service-v1/search-service-server-v1")