Compare commits
233 Commits
release/1.
...
master
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f2e8020c9b | ||
|
|
e7f84c28d6 | ||
|
|
779acf6202 | ||
|
|
ff626afe78 | ||
|
|
ebf13d3be1 | ||
|
|
0693ddf197 | ||
|
|
c6d0361678 | ||
|
|
380b62333c | ||
|
|
f7a549c1a3 | ||
|
|
55e0386e31 | ||
|
|
599ed66773 | ||
|
|
8e717a5067 | ||
|
|
0a7b6cddb2 | ||
|
|
87b585c354 | ||
|
|
43bb08e301 | ||
|
|
d116d99db7 | ||
|
|
e0fb825cf7 | ||
|
|
18abdedaf8 | ||
|
|
fe28c1463a | ||
|
|
2157751b3e | ||
|
|
74f971757c | ||
|
|
79be8e272b | ||
|
|
d10b5b26e8 | ||
|
|
fd1cb84ebb | ||
|
|
7abf08e13b | ||
|
|
1cef042c1e | ||
|
|
4296abab94 | ||
|
|
3f7373f55d | ||
|
|
496c6a1da8 | ||
|
|
2234a5dbd8 | ||
|
|
03c6248b84 | ||
|
|
e59012ba4b | ||
|
|
e931522014 | ||
|
|
c877eea3cc | ||
|
|
dd53eeff93 | ||
|
|
a41a584499 | ||
|
|
9d6da4e7a3 | ||
|
|
25c90d1312 | ||
|
|
36e2ce5196 | ||
|
|
bb92e423f1 | ||
|
|
2a96ba5477 | ||
|
|
d4a77d3f70 | ||
|
|
982846c0c3 | ||
|
|
731aa80b69 | ||
|
|
9ee4c8cd91 | ||
|
|
e24dfe7fbe | ||
|
|
bf9540a986 | ||
|
|
0c66b4e505 | ||
|
|
64cbe4aaf9 | ||
|
|
01ab04ece7 | ||
|
|
1c21f72a08 | ||
|
|
bd690d6cc9 | ||
|
|
c44bd014d3 | ||
|
|
60842f58d0 | ||
|
|
ec56f833ff | ||
|
|
ee5cf2ff5a | ||
|
|
665d27085a | ||
|
|
ee698a9647 | ||
|
|
28ab0dc8cc | ||
|
|
da080f9671 | ||
|
|
4ae80fdffc | ||
|
|
c144f26099 | ||
|
|
eba029928e | ||
|
|
6b58e8b894 | ||
|
|
c72c881e6e | ||
|
|
f518d4ff9c | ||
|
|
65ccdb8257 | ||
|
|
4fcd310a66 | ||
|
|
be1b0eb27d | ||
|
|
b09132d5bf | ||
|
|
173428771b | ||
|
|
b1b42ce0d7 | ||
|
|
afd40a7cd7 | ||
|
|
8e8ad7b9e7 | ||
|
|
5da599bc9b | ||
|
|
0471b7f52c | ||
|
|
487f6c3190 | ||
|
|
325050fca6 | ||
|
|
56e693db37 | ||
|
|
b3c4e014c8 | ||
|
|
e822f64f58 | ||
|
|
64e0c20334 | ||
|
|
b9922bd48e | ||
|
|
5a43ce421a | ||
|
|
aa297096ed | ||
|
|
b89a04cd3c | ||
|
|
27c636fb36 | ||
|
|
7b00edc2d3 | ||
|
|
ee102be366 | ||
|
|
d59e61424f | ||
|
|
8e497256c6 | ||
|
|
0a0b5a9842 | ||
|
|
3f6abf730d | ||
|
|
0deb73e318 | ||
|
|
2f91ccf538 | ||
|
|
426ca8f133 | ||
|
|
aa22906ade | ||
|
|
b754a1abc5 | ||
|
|
41fbdaca71 | ||
|
|
b2c013ece4 | ||
|
|
8b8dc7c7b5 | ||
|
|
7742c2bbad | ||
|
|
b36248fff4 | ||
|
|
292fc63873 | ||
|
|
45ed8bd67a | ||
|
|
6ee6c00746 | ||
|
|
55b08215d4 | ||
|
|
75922bdf86 | ||
|
|
286aa0a23b | ||
|
|
a2c9ed15c2 | ||
|
|
f5b66a2f29 | ||
|
|
8368cf408c | ||
|
|
5d0c08f99b | ||
|
|
a7044ab79e | ||
|
|
239ebe4a77 | ||
|
|
060f4c4569 | ||
|
|
865e7c20e9 | ||
|
|
ea5a3fdbac | ||
|
|
2173f622d1 | ||
|
|
04863a0a7f | ||
|
|
009c208cc3 | ||
|
|
397cb19f30 | ||
|
|
bf02ee7c46 | ||
|
|
560160979c | ||
|
|
c3b1d98837 | ||
|
|
b5f1013c05 | ||
|
|
71c423330f | ||
|
|
2bac8b2baf | ||
|
|
e13df14bc9 | ||
|
|
c66f06c090 | ||
|
|
d58ac6217d | ||
|
|
fe0703127b | ||
|
|
8a5b4bbb04 | ||
|
|
d2ec9b84fc | ||
|
|
a50be57b66 | ||
|
|
6861261983 | ||
|
|
4fc666b28f | ||
|
|
014c5d98c2 | ||
|
|
007a59fd6a | ||
|
|
cdaddb3399 | ||
|
|
113b575b3a | ||
|
|
827ff2fe34 | ||
|
|
7793aa06ef | ||
|
|
9a13d44e8b | ||
|
|
a8ef4044dc | ||
|
|
f68c1cd661 | ||
|
|
dc1b2d7797 | ||
|
|
2506d0b238 | ||
|
|
4aa78fe1cd | ||
|
|
54a28fbf5b | ||
|
|
3cdcab5aef | ||
|
|
25ab03713d | ||
|
|
e1683bb777 | ||
|
|
a672ff90a9 | ||
|
|
79b8167001 | ||
|
|
53b38aeab8 | ||
|
|
220cf97785 | ||
|
|
df06ed421f | ||
|
|
18b52bdf5b | ||
|
|
3a7eef94cf | ||
|
|
3a4172648c | ||
|
|
ac0d132e48 | ||
|
|
5760e0f2e5 | ||
|
|
4fd8bd38b1 | ||
|
|
bad068b2ea | ||
|
|
bcda41e904 | ||
|
|
ba5d01cbc0 | ||
|
|
8e63117ee6 | ||
|
|
2abcc1e704 | ||
|
|
f83cd10be5 | ||
|
|
763d505b02 | ||
|
|
692f634d67 | ||
|
|
a4a150a4f2 | ||
|
|
fb7bf97ecf | ||
|
|
8fc88f1dc8 | ||
|
|
43d2fe25d1 | ||
|
|
448bfd93f8 | ||
|
|
d03759146e | ||
|
|
70ba63771d | ||
|
|
480d488b2c | ||
|
|
ca18b9226e | ||
|
|
3e121e9e06 | ||
|
|
655b504848 | ||
|
|
95422004d1 | ||
|
|
f6e9490e32 | ||
|
|
ceec00fb9e | ||
|
|
4ce0b2d0ef | ||
|
|
c87b517233 | ||
|
|
49e0a5de80 | ||
|
|
70617039e9 | ||
|
|
424afd3df7 | ||
|
|
1a02ed425f | ||
|
|
1aee789284 | ||
|
|
709fd8f0d6 | ||
|
|
cb0fa5b304 | ||
|
|
0c457c3b8b | ||
|
|
743130a7c8 | ||
|
|
19b9b6b5dc | ||
|
|
85057683d7 | ||
|
|
564b0d8695 | ||
|
|
c1d9c78649 | ||
|
|
c2b71f0da0 | ||
|
|
e4b8f6d1d2 | ||
|
|
cbb0669b57 | ||
|
|
d986c38eed | ||
|
|
e837108d7c | ||
|
|
737cb84c8d | ||
|
|
572d2462c9 | ||
|
|
bb3606b03b | ||
|
|
fab1932e7b | ||
|
|
591ebc71fe | ||
|
|
a027afdd76 | ||
|
|
901bce0b44 | ||
|
|
35a7a921d3 | ||
|
|
ab192f8703 | ||
|
|
a99824067b | ||
|
|
3d85456b31 | ||
|
|
0ebe18cdfe | ||
|
|
645411a72a | ||
|
|
9da1486624 | ||
|
|
de9f41cde6 | ||
|
|
8976800392 | ||
|
|
e294caf014 | ||
|
|
2b0a357659 | ||
|
|
5d211fb4c3 | ||
|
|
b72254b445 | ||
|
|
ac4e929a01 | ||
|
|
8eabce9cd7 | ||
|
|
261fafc57b | ||
|
|
bab813d02a | ||
|
|
6e183ee69b | ||
|
|
7e1294fa3c | ||
|
|
53bc39c90e |
9
.gitignore
vendored
9
.gitignore
vendored
@ -26,3 +26,12 @@
|
||||
**/.DS_Store
|
||||
**/classpath-data.json
|
||||
**/dependencies-and-licenses-overview.txt
|
||||
|
||||
|
||||
gradle.properties
|
||||
gradlew
|
||||
gradlew.bat
|
||||
gradle/
|
||||
|
||||
**/.gradle
|
||||
**/build
|
||||
|
||||
23
.gitlab-ci.yml
Normal file
23
.gitlab-ci.yml
Normal file
@ -0,0 +1,23 @@
|
||||
variables:
|
||||
SONAR_PROJECT_KEY: 'RED_search-service'
|
||||
include:
|
||||
- project: 'gitlab/gitlab'
|
||||
ref: 'main'
|
||||
file: 'ci-templates/gradle_java.yml'
|
||||
|
||||
deploy:
|
||||
stage: deploy
|
||||
tags:
|
||||
- dind
|
||||
script:
|
||||
- echo "Building with gradle version ${BUILDVERSION}"
|
||||
- gradle -Pversion=${BUILDVERSION} publish
|
||||
- gradle bootBuildImage --cleanCache --publishImage -PbuildbootDockerHostNetwork=true -Pversion=${BUILDVERSION}
|
||||
- echo "BUILDVERSION=$BUILDVERSION" >> version.env
|
||||
artifacts:
|
||||
reports:
|
||||
dotenv: version.env
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
- if: $CI_COMMIT_BRANCH =~ /^release/
|
||||
- if: $CI_COMMIT_TAG
|
||||
@ -1,37 +0,0 @@
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>com.atlassian.bamboo</groupId>
|
||||
<artifactId>bamboo-specs-parent</artifactId>
|
||||
<version>7.2.2</version>
|
||||
<relativePath/>
|
||||
</parent>
|
||||
|
||||
<artifactId>bamboo-specs</artifactId>
|
||||
<version>1.0.0-SNAPSHOT</version>
|
||||
<packaging>jar</packaging>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.atlassian.bamboo</groupId>
|
||||
<artifactId>bamboo-specs-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.atlassian.bamboo</groupId>
|
||||
<artifactId>bamboo-specs</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Test dependencies -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
||||
<!-- run 'mvn test' to perform offline validation of the plan -->
|
||||
<!-- run 'mvn -Ppublish-specs' to upload the plan to your Bamboo server -->
|
||||
</project>
|
||||
@ -1,123 +0,0 @@
|
||||
package buildjob;
|
||||
|
||||
import com.atlassian.bamboo.specs.api.BambooSpec;
|
||||
import com.atlassian.bamboo.specs.api.builders.BambooKey;
|
||||
import com.atlassian.bamboo.specs.api.builders.docker.DockerConfiguration;
|
||||
import com.atlassian.bamboo.specs.api.builders.permission.PermissionType;
|
||||
import com.atlassian.bamboo.specs.api.builders.permission.Permissions;
|
||||
import com.atlassian.bamboo.specs.api.builders.permission.PlanPermissions;
|
||||
import com.atlassian.bamboo.specs.api.builders.plan.Job;
|
||||
import com.atlassian.bamboo.specs.api.builders.plan.Plan;
|
||||
import com.atlassian.bamboo.specs.api.builders.plan.PlanIdentifier;
|
||||
import com.atlassian.bamboo.specs.api.builders.plan.Stage;
|
||||
import com.atlassian.bamboo.specs.api.builders.plan.branches.BranchCleanup;
|
||||
import com.atlassian.bamboo.specs.api.builders.plan.branches.PlanBranchManagement;
|
||||
import com.atlassian.bamboo.specs.api.builders.project.Project;
|
||||
import com.atlassian.bamboo.specs.builders.task.CheckoutItem;
|
||||
import com.atlassian.bamboo.specs.builders.task.InjectVariablesTask;
|
||||
import com.atlassian.bamboo.specs.builders.task.ScriptTask;
|
||||
import com.atlassian.bamboo.specs.builders.task.VcsCheckoutTask;
|
||||
import com.atlassian.bamboo.specs.builders.task.VcsTagTask;
|
||||
import com.atlassian.bamboo.specs.builders.trigger.BitbucketServerTrigger;
|
||||
import com.atlassian.bamboo.specs.model.task.InjectVariablesScope;
|
||||
import com.atlassian.bamboo.specs.util.BambooServer;
|
||||
import com.atlassian.bamboo.specs.builders.task.ScriptTask;
|
||||
import com.atlassian.bamboo.specs.model.task.ScriptTaskProperties.Location;
|
||||
|
||||
import static com.atlassian.bamboo.specs.builders.task.TestParserTask.createJUnitParserTask;
|
||||
|
||||
/**
|
||||
* Plan configuration for Bamboo.
|
||||
* Learn more on: <a href="https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs">https://confluence.atlassian.com/display/BAMBOO/Bamboo+Specs</a>
|
||||
*/
|
||||
@BambooSpec
|
||||
public class PlanSpec {
|
||||
|
||||
private static final String SERVICE_NAME = "search-service";
|
||||
|
||||
private static final String JVM_ARGS =" -Xmx4g -XX:+ExitOnOutOfMemoryError -XX:SurvivorRatio=2 -XX:NewRatio=1 -XX:InitialTenuringThreshold=16 -XX:MaxTenuringThreshold=16 -XX:InitiatingHeapOccupancyPercent=35 ";
|
||||
|
||||
private static final String SERVICE_KEY = SERVICE_NAME.toUpperCase().replaceAll("-", "");
|
||||
|
||||
/**
|
||||
* Run main to publish plan on Bamboo
|
||||
*/
|
||||
public static void main(final String[] args) throws Exception {
|
||||
//By default credentials are read from the '.credentials' file.
|
||||
BambooServer bambooServer = new BambooServer("http://localhost:8085");
|
||||
|
||||
Plan plan = new PlanSpec().createPlan();
|
||||
bambooServer.publish(plan);
|
||||
PlanPermissions planPermission = new PlanSpec().createPlanPermission(plan.getIdentifier());
|
||||
bambooServer.publish(planPermission);
|
||||
}
|
||||
|
||||
private PlanPermissions createPlanPermission(PlanIdentifier planIdentifier) {
|
||||
Permissions permission = new Permissions()
|
||||
.userPermissions("atlbamboo", PermissionType.EDIT, PermissionType.VIEW, PermissionType.ADMIN, PermissionType.CLONE, PermissionType.BUILD)
|
||||
.groupPermissions("red-backend", PermissionType.EDIT, PermissionType.VIEW, PermissionType.CLONE, PermissionType.BUILD)
|
||||
.loggedInUserPermissions(PermissionType.VIEW)
|
||||
.anonymousUserPermissionView();
|
||||
return new PlanPermissions(planIdentifier.getProjectKey(), planIdentifier.getPlanKey()).permissions(permission);
|
||||
}
|
||||
|
||||
private Project project() {
|
||||
return new Project()
|
||||
.name("RED")
|
||||
.key(new BambooKey("RED"));
|
||||
}
|
||||
|
||||
public Plan createPlan() {
|
||||
return new Plan(
|
||||
project(),
|
||||
SERVICE_NAME, new BambooKey(SERVICE_KEY))
|
||||
.description("Plan created from (enter repository url of your plan)")
|
||||
.stages(new Stage("Default Stage")
|
||||
.jobs(new Job("Default Job",
|
||||
new BambooKey("JOB1"))
|
||||
.tasks(
|
||||
new ScriptTask()
|
||||
.description("Clean")
|
||||
.inlineBody("#!/bin/bash\n" +
|
||||
"set -e\n" +
|
||||
"rm -rf ./*"),
|
||||
new VcsCheckoutTask()
|
||||
.description("Checkout Default Repository")
|
||||
.checkoutItems(new CheckoutItem().defaultRepository()),
|
||||
new ScriptTask()
|
||||
.description("Build")
|
||||
.location(Location.FILE)
|
||||
.fileFromPath("bamboo-specs/src/main/resources/scripts/build-java.sh")
|
||||
.argument(SERVICE_NAME),
|
||||
createJUnitParserTask()
|
||||
.description("Resultparser")
|
||||
.resultDirectories("**/test-reports/*.xml, **/target/surefire-reports/*.xml, **/target/failsafe-reports/*.xml")
|
||||
.enabled(true),
|
||||
new InjectVariablesTask()
|
||||
.description("Inject git Tag")
|
||||
.path("git.tag")
|
||||
.namespace("g")
|
||||
.scope(InjectVariablesScope.LOCAL),
|
||||
new VcsTagTask()
|
||||
.description("${bamboo.g.gitTag}")
|
||||
.tagName("${bamboo.g.gitTag}")
|
||||
.defaultRepository())
|
||||
.dockerConfiguration(
|
||||
new DockerConfiguration()
|
||||
.image("nexus.iqser.com:5001/infra/maven:3.6.2-jdk-13-3.0.0")
|
||||
.dockerRunArguments("--net=host")
|
||||
.volume("/etc/maven/settings.xml", "/usr/share/maven/ref/settings.xml")
|
||||
.volume("/var/run/docker.sock", "/var/run/docker.sock")
|
||||
)
|
||||
)
|
||||
)
|
||||
.linkedRepositories("RED / " + SERVICE_NAME)
|
||||
|
||||
.triggers(new BitbucketServerTrigger())
|
||||
.planBranchManagement(new PlanBranchManagement()
|
||||
.createForVcsBranch()
|
||||
.delete(new BranchCleanup()
|
||||
.whenInactiveInRepositoryAfterDays(14))
|
||||
.notificationForCommitters());
|
||||
}
|
||||
}
|
||||
@ -1,51 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
SERVICE_NAME=$1
|
||||
|
||||
if [[ "${bamboo_version_tag}" = "dev" ]]
|
||||
then
|
||||
${bamboo_capability_system_builder_mvn3_Maven_3}/bin/mvn \
|
||||
-f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
|
||||
--no-transfer-progress \
|
||||
clean install \
|
||||
-Djava.security.egd=file:/dev/./urandomelse
|
||||
else
|
||||
${bamboo_capability_system_builder_mvn3_Maven_3}/bin/mvn \
|
||||
--no-transfer-progress \
|
||||
-f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
|
||||
versions:set \
|
||||
-DnewVersion=${bamboo_version_tag}
|
||||
${bamboo_capability_system_builder_mvn3_Maven_3}/bin/mvn \
|
||||
--no-transfer-progress \
|
||||
-f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
|
||||
versions:set \
|
||||
-DnewVersion=${bamboo_version_tag}
|
||||
${bamboo_capability_system_builder_mvn3_Maven_3}/bin/mvn \
|
||||
-f ${bamboo_build_working_directory}/$SERVICE_NAME-v1/pom.xml \
|
||||
--no-transfer-progress \
|
||||
clean deploy \
|
||||
-e \
|
||||
-DdeployAtEnd=true \
|
||||
-Dmaven.wagon.http.ssl.insecure=true \
|
||||
-Dmaven.wagon.http.ssl.allowall=true \
|
||||
-Dmaven.wagon.http.ssl.ignore.validity.dates=true \
|
||||
-DaltDeploymentRepository=iqser_release::default::https://nexus.iqser.com/repository/red-platform-releases
|
||||
fi
|
||||
|
||||
${bamboo_capability_system_builder_mvn3_Maven_3}/bin/mvn \
|
||||
--no-transfer-progress \
|
||||
-f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
|
||||
package
|
||||
|
||||
${bamboo_capability_system_builder_mvn3_Maven_3}/bin/mvn \
|
||||
--no-transfer-progress \
|
||||
-f ${bamboo_build_working_directory}/$SERVICE_NAME-image-v1/pom.xml \
|
||||
docker:push
|
||||
|
||||
if [[ "${bamboo_version_tag}" = "dev" ]]
|
||||
then
|
||||
echo "gitTag=${bamboo_planRepository_1_branch}_${bamboo_buildNumber}" > git.tag
|
||||
else
|
||||
echo "gitTag=${bamboo_version_tag}" > git.tag
|
||||
fi
|
||||
@ -1,17 +0,0 @@
|
||||
package buildjob;
|
||||
|
||||
|
||||
import org.junit.Test;
|
||||
|
||||
import com.atlassian.bamboo.specs.api.builders.plan.Plan;
|
||||
import com.atlassian.bamboo.specs.api.exceptions.PropertiesValidationException;
|
||||
import com.atlassian.bamboo.specs.api.util.EntityPropertiesBuilders;
|
||||
|
||||
public class PlanSpecTest {
|
||||
@Test
|
||||
public void checkYourPlanOffline() throws PropertiesValidationException {
|
||||
Plan plan = new PlanSpec().createPlan();
|
||||
|
||||
EntityPropertiesBuilders.build(plan);
|
||||
}
|
||||
}
|
||||
7
buildSrc/build.gradle.kts
Normal file
7
buildSrc/build.gradle.kts
Normal file
@ -0,0 +1,7 @@
|
||||
plugins {
|
||||
`kotlin-dsl`
|
||||
}
|
||||
|
||||
repositories {
|
||||
gradlePluginPortal()
|
||||
}
|
||||
@ -0,0 +1,60 @@
|
||||
plugins {
|
||||
`java-library`
|
||||
`maven-publish`
|
||||
pmd
|
||||
checkstyle
|
||||
jacoco
|
||||
}
|
||||
|
||||
repositories {
|
||||
mavenLocal()
|
||||
mavenCentral()
|
||||
maven {
|
||||
url = uri("https://nexus.knecon.com/repository/gindev/");
|
||||
credentials {
|
||||
username = providers.gradleProperty("mavenUser").getOrNull();
|
||||
password = providers.gradleProperty("mavenPassword").getOrNull();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
group = "com.iqser.red.service"
|
||||
|
||||
java.sourceCompatibility = JavaVersion.VERSION_17
|
||||
java.targetCompatibility = JavaVersion.VERSION_17
|
||||
|
||||
pmd {
|
||||
isConsoleOutput = true
|
||||
}
|
||||
|
||||
tasks.pmdMain {
|
||||
pmd.ruleSetFiles = files("${rootDir}/config/pmd/pmd.xml")
|
||||
}
|
||||
|
||||
tasks.pmdTest {
|
||||
pmd.ruleSetFiles = files("${rootDir}/config/pmd/test_pmd.xml")
|
||||
}
|
||||
|
||||
tasks.named<Test>("test") {
|
||||
useJUnitPlatform()
|
||||
reports {
|
||||
junitXml.outputLocation.set(layout.buildDirectory.dir("reports/junit"))
|
||||
}
|
||||
}
|
||||
|
||||
tasks.test {
|
||||
finalizedBy(tasks.jacocoTestReport) // report is always generated after tests run
|
||||
}
|
||||
|
||||
tasks.jacocoTestReport {
|
||||
dependsOn(tasks.test) // tests are required to run before generating the report
|
||||
reports {
|
||||
xml.required.set(true)
|
||||
csv.required.set(false)
|
||||
html.outputLocation.set(layout.buildDirectory.dir("jacocoHtml"))
|
||||
}
|
||||
}
|
||||
|
||||
java {
|
||||
withJavadocJar()
|
||||
}
|
||||
39
config/checkstyle/checkstyle.xml
Normal file
39
config/checkstyle/checkstyle.xml
Normal file
@ -0,0 +1,39 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE module PUBLIC "-//Puppy Crawl//DTD Check Configuration 1.3//EN"
|
||||
"http://www.puppycrawl.com/dtds/configuration_1_3.dtd">
|
||||
<module name="Checker">
|
||||
<property
|
||||
name="severity"
|
||||
value="error"/>
|
||||
<module name="TreeWalker">
|
||||
<module name="SuppressWarningsHolder"/>
|
||||
<module name="MissingDeprecated"/>
|
||||
<module name="MissingOverride"/>
|
||||
<module name="AnnotationLocation"/>
|
||||
<module name="JavadocStyle"/>
|
||||
<module name="NonEmptyAtclauseDescription"/>
|
||||
<module name="IllegalImport"/>
|
||||
<module name="RedundantImport"/>
|
||||
<module name="RedundantModifier"/>
|
||||
<module name="EmptyBlock"/>
|
||||
<module name="DefaultComesLast"/>
|
||||
<module name="EmptyStatement"/>
|
||||
<module name="EqualsHashCode"/>
|
||||
<module name="ExplicitInitialization"/>
|
||||
<module name="IllegalInstantiation"/>
|
||||
<module name="ModifiedControlVariable"/>
|
||||
<module name="MultipleVariableDeclarations"/>
|
||||
<module name="PackageDeclaration"/>
|
||||
<module name="ParameterAssignment"/>
|
||||
<module name="SimplifyBooleanExpression"/>
|
||||
<module name="SimplifyBooleanReturn"/>
|
||||
<module name="StringLiteralEquality"/>
|
||||
<module name="OneStatementPerLine"/>
|
||||
<module name="FinalClass"/>
|
||||
<module name="ArrayTypeStyle"/>
|
||||
<module name="UpperEll"/>
|
||||
<module name="OuterTypeFilename"/>
|
||||
</module>
|
||||
<module name="FileTabCharacter"/>
|
||||
<module name="SuppressWarningsFilter"/>
|
||||
</module>
|
||||
20
config/pmd/pmd.xml
Normal file
20
config/pmd/pmd.xml
Normal file
@ -0,0 +1,20 @@
|
||||
<?xml version="1.0"?>
|
||||
<ruleset name="Custom ruleset"
|
||||
xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
|
||||
|
||||
<description>
|
||||
Knecon ruleset checks the code for bad stuff
|
||||
</description>
|
||||
|
||||
<rule ref="category/java/errorprone.xml">
|
||||
<exclude name="MissingSerialVersionUID"/>
|
||||
<exclude name="AvoidLiteralsInIfCondition"/>
|
||||
<exclude name="AvoidDuplicateLiterals"/>
|
||||
<exclude name="NullAssignment"/>
|
||||
<exclude name="AssignmentInOperand"/>
|
||||
<exclude name="BeanMembersShouldSerialize"/>
|
||||
</rule>
|
||||
|
||||
</ruleset>
|
||||
22
config/pmd/test_pmd.xml
Normal file
22
config/pmd/test_pmd.xml
Normal file
@ -0,0 +1,22 @@
|
||||
<?xml version="1.0"?>
|
||||
<ruleset name="Custom ruleset"
|
||||
xmlns="http://pmd.sourceforge.net/ruleset/2.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://pmd.sourceforge.net/ruleset/2.0.0 http://pmd.sourceforge.net/ruleset_2_0_0.xsd">
|
||||
|
||||
<description>
|
||||
Knecon test ruleset checks the code for bad stuff
|
||||
</description>
|
||||
|
||||
|
||||
<rule ref="category/java/errorprone.xml">
|
||||
<exclude name="MissingSerialVersionUID"/>
|
||||
<exclude name="AvoidLiteralsInIfCondition"/>
|
||||
<exclude name="AvoidDuplicateLiterals"/>
|
||||
<exclude name="NullAssignment"/>
|
||||
<exclude name="AssignmentInOperand"/>
|
||||
<exclude name="TestClassWithoutTestCases"/>
|
||||
<exclude name="BeanMembersShouldSerialize"/>
|
||||
</rule>
|
||||
|
||||
</ruleset>
|
||||
1
gradle.properties.kts
Normal file
1
gradle.properties.kts
Normal file
@ -0,0 +1 @@
|
||||
version = 2.0-SNAPSHOT
|
||||
21
pom.xml
21
pom.xml
@ -1,21 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>search-service</artifactId>
|
||||
<groupId>com.iqser.red.service</groupId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
|
||||
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<modules>
|
||||
<module>bamboo-specs</module>
|
||||
<module>search-service-v1</module>
|
||||
<module>search-service-image-v1</module>
|
||||
</modules>
|
||||
|
||||
</project>
|
||||
15
publish-custom-image.sh
Executable file
15
publish-custom-image.sh
Executable file
@ -0,0 +1,15 @@
|
||||
#!/bin/bash
|
||||
dir=${PWD##*/}
|
||||
gradle assemble
|
||||
|
||||
# Get the current Git branch
|
||||
branch=$(git rev-parse --abbrev-ref HEAD)
|
||||
|
||||
# Get the short commit hash (first 5 characters)
|
||||
commit_hash=$(git rev-parse --short=5 HEAD)
|
||||
|
||||
# Combine branch and commit hash
|
||||
buildName="${USER}-${branch}-${commit_hash}"
|
||||
|
||||
gradle bootBuildImage --cleanCache --publishImage -PbuildbootDockerHostNetwork=true -Pversion=$buildName
|
||||
echo "nexus.knecon.com:5001/red/${dir}-server-v1:$buildName"
|
||||
6
renovate.json
Normal file
6
renovate.json
Normal file
@ -0,0 +1,6 @@
|
||||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
"config:base"
|
||||
]
|
||||
}
|
||||
@ -1,97 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<parent>
|
||||
<groupId>com.iqser.red</groupId>
|
||||
<artifactId>platform-docker-dependency</artifactId>
|
||||
<version>1.0.0</version>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>search-service-image-v1</artifactId>
|
||||
<groupId>com.iqser.red.service</groupId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<packaging>pom</packaging>
|
||||
|
||||
|
||||
<properties>
|
||||
<service.server>search-service-server-v1</service.server>
|
||||
<platform.jar>${service.server}.jar</platform.jar>
|
||||
<docker.skip.push>false</docker.skip.push>
|
||||
<docker.image.name>${docker.image.prefix}/${service.server}</docker.image.name>
|
||||
</properties>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-resources-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.codehaus.mojo</groupId>
|
||||
<artifactId>exec-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>io.fabric8</groupId>
|
||||
<artifactId>docker-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
</plugins>
|
||||
|
||||
<pluginManagement>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-dependency-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>download-platform-jar</id>
|
||||
<phase>prepare-package</phase>
|
||||
<goals>
|
||||
<goal>copy</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<artifactItems>
|
||||
<dependency>
|
||||
<groupId>${project.groupId}</groupId>
|
||||
<artifactId>${service.server}</artifactId>
|
||||
<version>${project.version}</version>
|
||||
<type>jar</type>
|
||||
<overWrite>true</overWrite>
|
||||
<destFileName>${platform.jar}</destFileName>
|
||||
</dependency>
|
||||
</artifactItems>
|
||||
<outputDirectory>${docker.build.directory}</outputDirectory>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>io.fabric8</groupId>
|
||||
<artifactId>docker-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<images>
|
||||
<image>
|
||||
<name>${docker.image.name}</name>
|
||||
<build>
|
||||
<dockerFileDir>${docker.build.directory}</dockerFileDir>
|
||||
<args>
|
||||
<PLATFORM_JAR>${platform.jar}</PLATFORM_JAR>
|
||||
</args>
|
||||
<tags>
|
||||
<tag>${docker.image.version}</tag>
|
||||
<tag>latest</tag>
|
||||
</tags>
|
||||
</build>
|
||||
</image>
|
||||
</images>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</pluginManagement>
|
||||
</build>
|
||||
</project>
|
||||
@ -1,7 +0,0 @@
|
||||
FROM red/base-image:1.0.0
|
||||
|
||||
ARG PLATFORM_JAR
|
||||
|
||||
ENV PLATFORM_JAR ${PLATFORM_JAR}
|
||||
|
||||
COPY ["${PLATFORM_JAR}", "/"]
|
||||
@ -1,39 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<parent>
|
||||
<artifactId>platform-dependency</artifactId>
|
||||
<groupId>com.iqser.red</groupId>
|
||||
<version>1.1.3</version>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<artifactId>search-service-v1</artifactId>
|
||||
<groupId>com.iqser.red.service</groupId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
|
||||
|
||||
<packaging>pom</packaging>
|
||||
|
||||
<modules>
|
||||
<module>search-service-api-v1</module>
|
||||
<module>search-service-server-v1</module>
|
||||
</modules>
|
||||
|
||||
<dependencyManagement>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.iqser.red</groupId>
|
||||
<artifactId>platform-commons-dependency</artifactId>
|
||||
<version>1.3.6</version>
|
||||
<scope>import</scope>
|
||||
<type>pom</type>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
|
||||
</dependencyManagement>
|
||||
|
||||
</project>
|
||||
28
search-service-v1/search-service-api-v1/build.gradle.kts
Normal file
28
search-service-v1/search-service-api-v1/build.gradle.kts
Normal file
@ -0,0 +1,28 @@
|
||||
plugins {
|
||||
id("com.iqser.red.service.java-conventions")
|
||||
id("io.freefair.lombok") version "8.4"
|
||||
}
|
||||
|
||||
description = "search-service-api-v1"
|
||||
|
||||
dependencies {
|
||||
implementation("org.springframework:spring-web:6.0.6")
|
||||
testImplementation("org.springframework.boot:spring-boot-starter-test:3.1.5")
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
create<MavenPublication>(name) {
|
||||
from(components["java"])
|
||||
}
|
||||
}
|
||||
repositories {
|
||||
maven {
|
||||
url = uri("https://nexus.knecon.com/repository/red-platform-releases/")
|
||||
credentials {
|
||||
username = providers.gradleProperty("mavenUser").getOrNull();
|
||||
password = providers.gradleProperty("mavenPassword").getOrNull();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,21 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<artifactId>search-service-v1</artifactId>
|
||||
<groupId>com.iqser.red.service</groupId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>search-service-api-v1</artifactId>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>org.springframework</groupId>
|
||||
<artifactId>spring-web</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
</project>
|
||||
@ -11,6 +11,7 @@ import lombok.NoArgsConstructor;
|
||||
@AllArgsConstructor
|
||||
public class IndexMessage {
|
||||
|
||||
private IndexMessageType messageType;
|
||||
private String dossierId;
|
||||
private String dossierTemplateId;
|
||||
private String fileId;
|
||||
|
||||
@ -0,0 +1,5 @@
|
||||
package com.iqser.red.service.search.v1.model;
|
||||
|
||||
public enum IndexMessageType {
|
||||
INSERT, UPDATE, DROP;
|
||||
}
|
||||
@ -23,9 +23,16 @@ public class MatchedDocument {
|
||||
private String dossierId;
|
||||
private String dossierTemplateId;
|
||||
private String fileId;
|
||||
private String dossierStatus;
|
||||
private String assignee;
|
||||
private Map<String, String> fileAttributes;
|
||||
private String workflowStatus;
|
||||
private boolean dossierDeleted;
|
||||
private boolean dossierArchived;
|
||||
private String fileName;
|
||||
|
||||
@Builder.Default
|
||||
private Map<String, Set<String>> highlights = new HashMap<>();
|
||||
private Map<String, List<String>> highlights = new HashMap<>();
|
||||
|
||||
@Builder.Default
|
||||
private Set<String> matchedTerms = new HashSet<>();
|
||||
|
||||
@ -14,7 +14,7 @@ import lombok.NoArgsConstructor;
|
||||
@AllArgsConstructor
|
||||
public class MatchedSection {
|
||||
|
||||
private int sectionNumber;
|
||||
private String sectionNumber;
|
||||
private String headline;
|
||||
|
||||
@Builder.Default
|
||||
|
||||
@ -1,12 +1,13 @@
|
||||
package com.iqser.red.service.search.v1.model;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
@Data
|
||||
@Builder
|
||||
@NoArgsConstructor
|
||||
@ -17,6 +18,11 @@ public class SearchRequest {
|
||||
private List<String> dossierIds;
|
||||
private List<String> dossierTemplateIds;
|
||||
private String fileId;
|
||||
private String assignee;
|
||||
private boolean includeArchivedDossiers;
|
||||
private boolean includeDeletedDossiers;
|
||||
private String workflowStatus;
|
||||
private Map<String, String> fileAttributes;
|
||||
private int page;
|
||||
private int pageSize;
|
||||
private boolean returnSections;
|
||||
|
||||
@ -21,5 +21,4 @@ public class SearchResult {
|
||||
@Builder.Default
|
||||
private List<MatchedDocument> matchedDocuments = new ArrayList<>();
|
||||
|
||||
|
||||
}
|
||||
|
||||
@ -3,6 +3,7 @@ package com.iqser.red.service.search.v1.resources;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.PostMapping;
|
||||
import org.springframework.web.bind.annotation.RequestBody;
|
||||
import org.springframework.web.bind.annotation.ResponseBody;
|
||||
import org.springframework.web.bind.annotation.ResponseStatus;
|
||||
@ -14,9 +15,10 @@ public interface SearchResource {
|
||||
|
||||
String SEARCH_PATH = "/search";
|
||||
|
||||
|
||||
@ResponseBody
|
||||
@ResponseStatus(value = HttpStatus.OK)
|
||||
@GetMapping(value = SEARCH_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
|
||||
@PostMapping(value = SEARCH_PATH, consumes = MediaType.APPLICATION_JSON_VALUE, produces = MediaType.APPLICATION_JSON_VALUE)
|
||||
SearchResult getDossierStatus(@RequestBody SearchRequest searchRequest);
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,17 @@
|
||||
package com.iqser.red.service.search.v1;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
public class IdentityTest {
|
||||
|
||||
@Test
|
||||
public void mockTest() {
|
||||
|
||||
int i = 1;
|
||||
assertThat(i).isEqualTo(1);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
73
search-service-v1/search-service-server-v1/build.gradle.kts
Normal file
73
search-service-v1/search-service-server-v1/build.gradle.kts
Normal file
@ -0,0 +1,73 @@
|
||||
import org.springframework.boot.gradle.tasks.bundling.BootBuildImage
|
||||
|
||||
plugins {
|
||||
application
|
||||
id("com.iqser.red.service.java-conventions")
|
||||
id("org.springframework.boot") version "3.1.5"
|
||||
id("io.spring.dependency-management") version "1.1.3"
|
||||
id("org.sonarqube") version "4.4.1.3373"
|
||||
id("io.freefair.lombok") version "8.4"
|
||||
}
|
||||
|
||||
description = "search-service-server-v1"
|
||||
|
||||
configurations {
|
||||
all {
|
||||
exclude(group = "commons-logging", module = "commons-logging")
|
||||
exclude(group = "org.springframework.boot", module = "spring-boot-starter-log4j2")
|
||||
exclude(group = "com.iqser.red.commons", module = "logging-commons")
|
||||
}
|
||||
}
|
||||
|
||||
val springBootStarterVersion = "3.1.5"
|
||||
|
||||
dependencies {
|
||||
api("com.knecon.fforesight:tenant-commons:0.30.0")
|
||||
api("com.knecon.fforesight:tracing-commons:0.5.0")
|
||||
api("com.knecon.fforesight:lifecycle-commons:0.6.0")
|
||||
api("com.google.guava:guava:31.1-jre")
|
||||
api("com.iqser.red.commons:storage-commons:2.45.0")
|
||||
api(project(":search-service-api-v1"))
|
||||
api("com.iqser.red.service:persistence-service-internal-api-v1:2.576.0-RED10106.0")
|
||||
api("com.iqser.red.commons:spring-commons:2.1.0")
|
||||
api("com.iqser.red.commons:metric-commons:2.1.0")
|
||||
api("com.iqser.red.commons:jackson-commons:2.1.0")
|
||||
api("co.elastic.clients:elasticsearch-java:8.6.2")
|
||||
api("org.opensearch.client:opensearch-rest-client:2.6.0")
|
||||
api("org.opensearch.client:opensearch-java:2.3.0")
|
||||
api("jakarta.json:jakarta.json-api:2.1.1")
|
||||
api("org.springframework.cloud:spring-cloud-starter-openfeign:4.0.4")
|
||||
api("org.springframework.boot:spring-boot-starter-aop:${springBootStarterVersion}")
|
||||
api("org.springframework.boot:spring-boot-starter-amqp:${springBootStarterVersion}")
|
||||
api("net.logstash.logback:logstash-logback-encoder:7.4")
|
||||
api("ch.qos.logback:logback-classic")
|
||||
testImplementation("org.springframework.boot:spring-boot-starter-test:${springBootStarterVersion}")
|
||||
testImplementation("com.iqser.red.commons:test-commons:2.1.0")
|
||||
testImplementation("org.testcontainers:elasticsearch:1.17.6")
|
||||
testImplementation("org.opensearch:opensearch-testcontainers:2.0.0")
|
||||
testImplementation("org.springframework.amqp:spring-rabbit-test:3.0.2")
|
||||
}
|
||||
|
||||
tasks.named<BootBuildImage>("bootBuildImage") {
|
||||
|
||||
environment.put("BPE_DELIM_JAVA_TOOL_OPTIONS", " ")
|
||||
environment.put("BPE_APPEND_JAVA_TOOL_OPTIONS", "-Dfile.encoding=UTF-8")
|
||||
|
||||
imageName.set("nexus.knecon.com:5001/red/${project.name}:${project.version}")
|
||||
if (project.hasProperty("buildbootDockerHostNetwork")) {
|
||||
network.set("host")
|
||||
}
|
||||
docker {
|
||||
if (project.hasProperty("buildbootDockerHostNetwork")) {
|
||||
bindHostToBuilder.set(true)
|
||||
}
|
||||
verboseLogging.set(true)
|
||||
|
||||
publishRegistry {
|
||||
username.set(providers.gradleProperty("mavenUser").getOrNull())
|
||||
password.set(providers.gradleProperty("mavenPassword").getOrNull())
|
||||
email.set(providers.gradleProperty("mavenEmail").getOrNull())
|
||||
url.set("https://nexus.knecon.com:5001/")
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,156 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<parent>
|
||||
<artifactId>search-service-v1</artifactId>
|
||||
<groupId>com.iqser.red.service</groupId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
|
||||
<artifactId>search-service-server-v1</artifactId>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>com.iqser.red.commons</groupId>
|
||||
<artifactId>storage-commons</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.iqser.red.service</groupId>
|
||||
<artifactId>search-service-api-v1</artifactId>
|
||||
<version>${project.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.iqser.red.service</groupId>
|
||||
<artifactId>file-management-service-api-v1</artifactId>
|
||||
<version>2.47.0</version>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>com.iqser.red.service</groupId>
|
||||
<artifactId>configuration-service-api-v1</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<!-- commons -->
|
||||
<dependency>
|
||||
<groupId>com.iqser.red.commons</groupId>
|
||||
<artifactId>spring-commons</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.iqser.red.commons</groupId>
|
||||
<artifactId>logging-commons</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.iqser.red.commons</groupId>
|
||||
<artifactId>metric-commons</artifactId>
|
||||
</dependency>
|
||||
<!-- other external -->
|
||||
|
||||
<dependency>
|
||||
<groupId>org.elasticsearch</groupId>
|
||||
<artifactId>elasticsearch</artifactId>
|
||||
<version>7.13.2</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.elasticsearch.client</groupId>
|
||||
<artifactId>elasticsearch-rest-high-level-client</artifactId>
|
||||
<version>7.13.2</version>
|
||||
</dependency>
|
||||
|
||||
<!-- spring -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.cloud</groupId>
|
||||
<artifactId>spring-cloud-starter-openfeign</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-amqp</artifactId>
|
||||
<version>2.3.1.RELEASE</version>
|
||||
</dependency>
|
||||
|
||||
<!-- test dependencies -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.iqser.red.commons</groupId>
|
||||
<artifactId>test-commons</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.testcontainers</groupId>
|
||||
<artifactId>elasticsearch</artifactId>
|
||||
<version>1.15.3</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.amqp</groupId>
|
||||
<artifactId>spring-rabbit-test</artifactId>
|
||||
<version>2.3.1</version>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<!-- generate git.properties for exposure in /info -->
|
||||
<groupId>pl.project13.maven</groupId>
|
||||
<artifactId>git-commit-id-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>revision</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<generateGitPropertiesFile>true</generateGitPropertiesFile>
|
||||
<gitDescribe>
|
||||
<tags>true</tags>
|
||||
</gitDescribe>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-jar-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>original-jar</id>
|
||||
<goals>
|
||||
<goal>jar</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<classifier>original</classifier>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
|
||||
<plugin>
|
||||
<!-- repackages the generated jar into a runnable fat-jar and makes it
|
||||
executable -->
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
<executions>
|
||||
<execution>
|
||||
<goals>
|
||||
<goal>repackage</goal>
|
||||
</goals>
|
||||
<configuration>
|
||||
<executable>true</executable>
|
||||
</configuration>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
||||
@ -2,24 +2,31 @@ package com.iqser.red.service.search.v1.server;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.actuate.autoconfigure.security.servlet.ManagementWebSecurityAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
|
||||
import org.springframework.boot.autoconfigure.security.servlet.SecurityAutoConfiguration;
|
||||
import org.springframework.boot.context.properties.EnableConfigurationProperties;
|
||||
import org.springframework.cloud.openfeign.EnableFeignClients;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.EnableAspectJAutoProxy;
|
||||
import org.springframework.context.annotation.Import;
|
||||
|
||||
import com.iqser.red.commons.spring.DefaultWebMvcConfiguration;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.FailfastElasticSearchHealthIndicator;
|
||||
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
|
||||
import com.iqser.red.storage.commons.StorageAutoConfiguration;
|
||||
import com.knecon.fforesight.lifecyclecommons.LifecycleAutoconfiguration;
|
||||
import com.knecon.fforesight.tenantcommons.MultiTenancyAutoConfiguration;
|
||||
|
||||
@Import({DefaultWebMvcConfiguration.class})
|
||||
import io.micrometer.core.aop.TimedAspect;
|
||||
import io.micrometer.core.instrument.MeterRegistry;
|
||||
|
||||
@ImportAutoConfiguration({MultiTenancyAutoConfiguration.class, LifecycleAutoconfiguration.class})
|
||||
@Import({StorageAutoConfiguration.class})
|
||||
@EnableFeignClients(basePackageClasses = FileStatusClient.class)
|
||||
@EnableConfigurationProperties(ElasticsearchSettings.class)
|
||||
@EnableConfigurationProperties({ElasticsearchSettings.class, SearchServiceSettings.class})
|
||||
@SpringBootApplication(exclude = {SecurityAutoConfiguration.class, ManagementWebSecurityAutoConfiguration.class})
|
||||
@EnableAspectJAutoProxy
|
||||
public class Application {
|
||||
|
||||
public static void main(String[] args) {
|
||||
@ -29,10 +36,9 @@ public class Application {
|
||||
|
||||
|
||||
@Bean
|
||||
@ConditionalOnMissingBean
|
||||
public ElasticsearchClient elasticsearchClient(ElasticsearchSettings elasticsearchSettings) {
|
||||
public TimedAspect timedAspect(MeterRegistry registry) {
|
||||
|
||||
return new ElasticsearchClient(elasticsearchSettings);
|
||||
return new TimedAspect(registry);
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,10 @@
|
||||
package com.iqser.red.service.search.v1.server.client;
|
||||
|
||||
import org.springframework.cloud.openfeign.FeignClient;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.internal.resources.DossierResource;
|
||||
|
||||
@FeignClient(name = "DossierResource", url = "${persistence-service.url}")
|
||||
public interface DossierClient extends DossierResource {
|
||||
|
||||
}
|
||||
@ -1,73 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.client;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import javax.annotation.PostConstruct;
|
||||
import javax.annotation.PreDestroy;
|
||||
|
||||
import org.apache.http.HttpHost;
|
||||
import org.apache.http.auth.AuthScope;
|
||||
import org.apache.http.auth.UsernamePasswordCredentials;
|
||||
import org.apache.http.client.CredentialsProvider;
|
||||
import org.apache.http.impl.client.BasicCredentialsProvider;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
import org.elasticsearch.client.RestClientBuilder;
|
||||
import org.elasticsearch.client.RestHighLevelClient;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.experimental.Delegate;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class ElasticsearchClient {
|
||||
|
||||
// Lower timeouts should be set per request.
|
||||
private static final int ABSURD_HIGH_TIMEOUT = 90_000_000;
|
||||
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
@Delegate
|
||||
private RestHighLevelClient client;
|
||||
|
||||
|
||||
@PostConstruct
|
||||
public void init() {
|
||||
|
||||
HttpHost[] httpHost = settings.getHosts()
|
||||
.stream()
|
||||
.map(host -> new HttpHost(host, settings.getPort(), settings.getScheme()))
|
||||
.collect(Collectors.toList())
|
||||
.toArray(new HttpHost[settings.getHosts().size()]);
|
||||
|
||||
RestClientBuilder builder = RestClient.builder(httpHost)
|
||||
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT)
|
||||
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));
|
||||
|
||||
if (settings.getUsername() != null && !settings.getUsername().isEmpty()) {
|
||||
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
|
||||
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(settings.getUsername(), settings
|
||||
.getPassword()));
|
||||
builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
|
||||
}
|
||||
|
||||
client = new RestHighLevelClient(builder);
|
||||
}
|
||||
|
||||
|
||||
@PreDestroy
|
||||
public void shutdown() {
|
||||
|
||||
try {
|
||||
client.close();
|
||||
} catch (IOException e) {
|
||||
log.error(e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -2,9 +2,9 @@ package com.iqser.red.service.search.v1.server.client;
|
||||
|
||||
import org.springframework.cloud.openfeign.FeignClient;
|
||||
|
||||
import com.iqser.red.service.file.management.v1.api.resources.StatusResource;
|
||||
import com.iqser.red.service.persistence.service.v1.api.internal.resources.StatusResource;
|
||||
|
||||
@FeignClient(name = "StatusResource", url = "${file-management-service.url}")
|
||||
@FeignClient(name = "StatusResource", url = "${persistence-service.url}")
|
||||
public interface FileStatusClient extends StatusResource {
|
||||
|
||||
}
|
||||
|
||||
@ -2,8 +2,9 @@ package com.iqser.red.service.search.v1.server.client;
|
||||
|
||||
import org.springframework.cloud.openfeign.FeignClient;
|
||||
|
||||
import com.iqser.red.service.file.management.v1.api.resources.FileStatusProcessingUpdateResource;
|
||||
import com.iqser.red.service.persistence.service.v1.api.internal.resources.FileStatusProcessingUpdateResource;
|
||||
|
||||
@FeignClient(name = "FileStatusProcessingUpdateResource", url = "${file-management-service.url}")
|
||||
@FeignClient(name = "FileStatusProcessingUpdateResource", url = "${persistence-service.url}")
|
||||
public interface FileStatusProcessingUpdateClient extends FileStatusProcessingUpdateResource {
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,10 @@
|
||||
package com.iqser.red.service.search.v1.server.client;
|
||||
|
||||
import org.springframework.cloud.openfeign.FeignClient;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.internal.resources.IndexInformationResource;
|
||||
|
||||
@FeignClient(name = "IndexInformationResource", url = "${persistence-service.url}")
|
||||
public interface IndexInformationClient extends IndexInformationResource {
|
||||
|
||||
}
|
||||
@ -0,0 +1,119 @@
|
||||
package com.iqser.red.service.search.v1.server.configuration;
|
||||
|
||||
import org.springframework.amqp.core.Binding;
|
||||
import org.springframework.amqp.core.BindingBuilder;
|
||||
import org.springframework.amqp.core.DirectExchange;
|
||||
import org.springframework.amqp.core.Queue;
|
||||
import org.springframework.amqp.core.QueueBuilder;
|
||||
import org.springframework.amqp.core.TopicExchange;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@Configuration
|
||||
@RequiredArgsConstructor
|
||||
public class MessagingConfiguration {
|
||||
|
||||
public static final String INDEXING_REQUEST_QUEUE_PREFIX = "indexing_request";
|
||||
public static final String INDEXING_REQUEST_EXCHANGE = "indexing_request_exchange";
|
||||
public static final String INDEXING_DLQ = "indexing_error";
|
||||
|
||||
public static final String DELETE_FROM_INDEX_REQUEST_QUEUE_PREFIX = "delete_from_index_request";
|
||||
public static final String DELETE_FROM_INDEX_REQUEST_EXCHANGE = "delete_from_index_request_exchange";
|
||||
public static final String DELETE_FROM_INDEX_DLQ = "delete_from_index_error";
|
||||
|
||||
public static final String X_ERROR_INFO_HEADER = "x-error-message";
|
||||
public static final String X_ERROR_INFO_TIMESTAMP_HEADER = "x-error-message-timestamp";
|
||||
|
||||
@Value("${fforesight.multitenancy.tenant-delete-queue:search-service-tenant-delete}")
|
||||
private String tenantDeleteEventQueueName;
|
||||
@Value("${fforesight.multitenancy.tenant-delete-dlq:search-service-tenant-delete-error}")
|
||||
private String tenantDeleteDLQName;
|
||||
|
||||
@Value("${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
|
||||
private String tenantUpdatedEventQueueName;
|
||||
@Value("${fforesight.multitenancy.tenant-updated-dlq:search-service-tenant-updated-error}")
|
||||
private String tenantUpdatedDLQName;
|
||||
|
||||
|
||||
@Bean
|
||||
public DirectExchange indexingRequestExchange() {
|
||||
|
||||
return new DirectExchange(INDEXING_REQUEST_EXCHANGE);
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue indexingDLQ() {
|
||||
|
||||
return QueueBuilder.durable(INDEXING_DLQ).build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public DirectExchange deleteFromIndexRequestExchange() {
|
||||
|
||||
return new DirectExchange(DELETE_FROM_INDEX_REQUEST_EXCHANGE);
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue deleteFromIndexDLQ() {
|
||||
|
||||
return QueueBuilder.durable(DELETE_FROM_INDEX_DLQ).build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Binding tenantExchangeDeleteBinding(@Qualifier("tenantUserManagementTenantDeleteQueue") Queue tenantUserManagementTenantDeleteQueue,
|
||||
@Qualifier("tenantExchange") TopicExchange tenantExchange) {
|
||||
|
||||
return BindingBuilder.bind(tenantUserManagementTenantDeleteQueue).to(tenantExchange).with("tenant.delete");
|
||||
}
|
||||
|
||||
|
||||
@Bean("tenantUserManagementTenantDeleteQueue")
|
||||
public Queue tenantDeleteQueue() {
|
||||
|
||||
return QueueBuilder.durable(this.tenantDeleteEventQueueName)
|
||||
.withArgument("x-dead-letter-exchange", "")
|
||||
.withArgument("x-dead-letter-routing-key", this.tenantDeleteDLQName)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue tenantDeleteDLQ() {
|
||||
|
||||
return QueueBuilder.durable(this.tenantDeleteDLQName).build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Binding tenantExchangeUpdatedBinding(@Qualifier("tenantUserManagementTenantUpdatedQueue") Queue tenantUserManagementTenantUpdatedQueue,
|
||||
@Qualifier("tenantExchange") TopicExchange tenantExchange) {
|
||||
|
||||
return BindingBuilder.bind(tenantUserManagementTenantUpdatedQueue).to(tenantExchange).with("tenant.updated");
|
||||
}
|
||||
|
||||
|
||||
@Bean("tenantUserManagementTenantUpdatedQueue")
|
||||
public Queue tenantUpdatedQueue() {
|
||||
|
||||
return QueueBuilder.durable(this.tenantUpdatedEventQueueName)
|
||||
.withArgument("x-dead-letter-exchange", "")
|
||||
.withArgument("x-dead-letter-routing-key", this.tenantUpdatedDLQName)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue tenantUpdatedDLQ() {
|
||||
|
||||
return QueueBuilder.durable(this.tenantUpdatedDLQName).build();
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,11 @@
|
||||
package com.iqser.red.service.search.v1.server.configuration;
|
||||
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
import com.knecon.fforesight.tenantcommons.queue.TenantMessagingConfiguration;
|
||||
|
||||
@Configuration
|
||||
public class TenantMessagingConfigurationImpl extends TenantMessagingConfiguration {
|
||||
|
||||
|
||||
}
|
||||
@ -19,9 +19,18 @@ public class SearchController implements SearchResource {
|
||||
|
||||
public SearchResult getDossierStatus(@RequestBody SearchRequest searchRequest) {
|
||||
|
||||
return searchService.search(searchRequest.getQueryString(), searchRequest.getDossierTemplateIds(),
|
||||
searchRequest.getDossierIds(), searchRequest.getFileId(), searchRequest
|
||||
.getPage(), searchRequest.getPageSize(), searchRequest.isReturnSections());
|
||||
return searchService.search(searchRequest.getQueryString(),
|
||||
searchRequest.getDossierTemplateIds(),
|
||||
searchRequest.getDossierIds(),
|
||||
searchRequest.getFileId(),
|
||||
searchRequest.getAssignee(),
|
||||
searchRequest.isIncludeDeletedDossiers(),
|
||||
searchRequest.isIncludeArchivedDossiers(),
|
||||
searchRequest.getWorkflowStatus(),
|
||||
searchRequest.getFileAttributes(),
|
||||
searchRequest.getPage(),
|
||||
searchRequest.getPageSize(),
|
||||
searchRequest.isReturnSections());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -2,12 +2,31 @@ package com.iqser.red.service.search.v1.server.exception;
|
||||
|
||||
public class IndexException extends RuntimeException {
|
||||
|
||||
public static final String INDEX_EXISTS_ERROR = "Unable to check, if index exists";
|
||||
public static final String CONTENT_TO_JSON_ERROR = "Could not convert document with id '%s' to JSON!";
|
||||
public static final String DOCUMENT_INDEX_ERROR = "Error during indexing document with id '%s'";
|
||||
public static final String DOCUMENT_DELETE_ERROR = "Error during deleting document with id '%s'";
|
||||
public static final String FAILED_TO_SEARCH = "Error during search";
|
||||
private static final String INDEX_EXISTS_ERROR = "Unable to check, if index exists";
|
||||
private static final String DOCUMENT_INDEX_ERROR = "Error during indexing document with id '%s'";
|
||||
private static final String DOCUMENT_UPDATE_ERROR = "Error during updating document with id '%s'";
|
||||
private static final String DOCUMENT_DELETE_ERROR = "Error during deleting document with id '%s'";
|
||||
private static final String FAILED_TO_SEARCH = "Error during search";
|
||||
|
||||
public static IndexException indexExists(Throwable cause){
|
||||
return new IndexException(INDEX_EXISTS_ERROR, cause);
|
||||
}
|
||||
|
||||
public static IndexException documentIndexError(String fileId, Throwable cause){
|
||||
return new IndexException(String.format(DOCUMENT_INDEX_ERROR, fileId), cause);
|
||||
}
|
||||
|
||||
public static IndexException documentUpdateError(String fileId, Throwable cause){
|
||||
return new IndexException(String.format(DOCUMENT_UPDATE_ERROR, fileId), cause);
|
||||
}
|
||||
|
||||
public static IndexException documentDeleteError(String fileId, Throwable cause){
|
||||
return new IndexException(String.format(DOCUMENT_DELETE_ERROR, fileId), cause);
|
||||
}
|
||||
|
||||
public static IndexException searchFailed(Throwable cause){
|
||||
return new IndexException(FAILED_TO_SEARCH, cause);
|
||||
}
|
||||
|
||||
public IndexException(String message) {
|
||||
|
||||
|
||||
@ -0,0 +1,49 @@
|
||||
package com.iqser.red.service.search.v1.server.migration;
|
||||
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.context.event.ApplicationReadyEvent;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.event.EventListener;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.model.IndexMessage;
|
||||
import com.iqser.red.service.search.v1.model.IndexMessageType;
|
||||
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
|
||||
import com.iqser.red.service.search.v1.server.settings.SearchServiceSettings;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.TenantsClient;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@SuppressWarnings("PMD")
|
||||
public class MigrationStarterService {
|
||||
|
||||
private final ApplicationContext ctx;
|
||||
private final IndexInformationService indexInformationService;
|
||||
private final IndexingMessageReceiver indexingMessageReceiver;
|
||||
private final SearchServiceSettings settings;
|
||||
private final TenantsClient tenantsClient;
|
||||
|
||||
|
||||
@EventListener(ApplicationReadyEvent.class)
|
||||
public void migrate() {
|
||||
|
||||
// This can only run in post upgrade hook, because otherwise the old service is still runnnig.
|
||||
if (settings.isMigrateOnly()) {
|
||||
tenantsClient.getTenants().forEach(tenant -> {
|
||||
TenantContext.setTenantId(tenant.getTenantId());
|
||||
if (indexInformationService.hasIndexChanged()) {
|
||||
log.info("Index has changed and will be closed, dropped, recreated and all files will be indexed");
|
||||
indexingMessageReceiver.receiveIndexingRequest(IndexMessage.builder().messageType(IndexMessageType.DROP).build());
|
||||
}
|
||||
});
|
||||
System.exit(SpringApplication.exit(ctx, () -> 0));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,21 @@
|
||||
package com.iqser.red.service.search.v1.server.model;
|
||||
|
||||
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.EqualsAndHashCode;
|
||||
|
||||
@Data
|
||||
@Builder
|
||||
@AllArgsConstructor
|
||||
@EqualsAndHashCode(onlyExplicitlyIncluded = true)
|
||||
public class Connection {
|
||||
|
||||
@EqualsAndHashCode.Include
|
||||
private String hosts;
|
||||
private SearchConnection searchConnection;
|
||||
|
||||
}
|
||||
@ -1,8 +1,8 @@
|
||||
package com.iqser.red.service.search.v1.server.model;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
@ -22,9 +22,17 @@ public class IndexDocument implements Serializable {
|
||||
private String dossierId;
|
||||
private String fileId;
|
||||
private String filename;
|
||||
private OffsetDateTime date;
|
||||
|
||||
private Date date;
|
||||
private String assignee;
|
||||
private boolean dossierDeleted;
|
||||
private boolean dossierArchived;
|
||||
private String workflowStatus;
|
||||
|
||||
@Builder.Default
|
||||
private List<IndexSection> sections = new ArrayList<>();
|
||||
|
||||
@Builder.Default
|
||||
private List<IndexFileAttribute> fileAttributes = new ArrayList<>();
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,22 @@
|
||||
package com.iqser.red.service.search.v1.server.model;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
@Data
|
||||
@Builder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class IndexDocumentUpdate {
|
||||
|
||||
private String assignee;
|
||||
private String workflowStatus;
|
||||
private boolean dossierDeleted;
|
||||
private boolean dossierArchived;
|
||||
private List<IndexFileAttribute> fileAttributes;
|
||||
|
||||
}
|
||||
@ -0,0 +1,17 @@
|
||||
package com.iqser.red.service.search.v1.server.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
@Data
|
||||
@Builder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class IndexFileAttribute {
|
||||
|
||||
private String name;
|
||||
private String value;
|
||||
|
||||
}
|
||||
@ -16,8 +16,9 @@ import lombok.NoArgsConstructor;
|
||||
@SuppressWarnings("serial")
|
||||
public class IndexSection implements Serializable {
|
||||
|
||||
private int sectionNumber;
|
||||
private String sectionNumber;
|
||||
private String text;
|
||||
private Set<Integer> pages;
|
||||
private String headline;
|
||||
|
||||
}
|
||||
|
||||
@ -14,7 +14,7 @@ import lombok.NoArgsConstructor;
|
||||
@AllArgsConstructor
|
||||
public class SectionText {
|
||||
|
||||
private int sectionNumber;
|
||||
private String sectionNumber;
|
||||
private String headline;
|
||||
private String text;
|
||||
|
||||
|
||||
@ -0,0 +1,38 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitListener;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class DeleteTenantMessageReceiver {
|
||||
|
||||
private final IndexDeleteService indexDeleteService;
|
||||
|
||||
@Value("${fforesight.multitenancy.tenant-delete-queue:tenant-delete-queue}")
|
||||
private String tenantDeleteQueue;
|
||||
|
||||
|
||||
@PostConstruct
|
||||
public void postConstruct() {
|
||||
|
||||
log.info("Listener for tenant-delete started for queue: {}", this.tenantDeleteQueue);
|
||||
}
|
||||
|
||||
|
||||
@RabbitListener(queues = "${fforesight.multitenancy.tenant-delete-queue:tenant-delete-queue}")
|
||||
public void deleteTenant(TenantResponse tenant) {
|
||||
|
||||
indexDeleteService.dropIndex(tenant.getSearchConnection());
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,26 +1,44 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
|
||||
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.DELETE_FROM_INDEX_QUEUE;
|
||||
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_DQL;
|
||||
import static com.iqser.red.service.search.v1.server.queue.MessagingConfiguration.INDEXING_QUEUE;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.DELETE_FROM_INDEX_DLQ;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_DLQ;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.INDEXING_REQUEST_EXCHANGE;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.X_ERROR_INFO_HEADER;
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.X_ERROR_INFO_TIMESTAMP_HEADER;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.List;
|
||||
|
||||
import org.springframework.amqp.AmqpRejectAndDontRequeueException;
|
||||
import org.springframework.amqp.core.Message;
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitListener;
|
||||
import org.springframework.amqp.rabbit.core.RabbitTemplate;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.file.management.v1.api.model.FileStatus;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileErrorInfo;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
|
||||
import com.iqser.red.service.search.v1.model.IndexMessage;
|
||||
import com.iqser.red.service.search.v1.model.IndexMessageType;
|
||||
import com.iqser.red.service.search.v1.server.client.DossierClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDocumentConverterService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexInformationService;
|
||||
import com.iqser.red.service.search.v1.server.service.TextStorageService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@ -28,61 +46,191 @@ import lombok.extern.slf4j.Slf4j;
|
||||
@RequiredArgsConstructor
|
||||
public class IndexingMessageReceiver {
|
||||
|
||||
public static final String INDEXING_LISTENER_ID = "indexing-listener";
|
||||
public static final String DELETE_FROM_INDEX_LISTENER_ID = "delete-from-index-listener";
|
||||
|
||||
private final ObjectMapper objectMapper;
|
||||
private final TextStorageService textStorageService;
|
||||
private final DocumentIndexService documentIndexService;
|
||||
private final FileStatusClient fileStatusClient;
|
||||
private final DossierClient dossierClient;
|
||||
private final FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
|
||||
private final RabbitTemplate rabbitTemplate;
|
||||
|
||||
private final DocumentDeleteService documentDeleteService;
|
||||
private final DocumentUpdateService documentUpdateService;
|
||||
private final DocumentIndexService documentIndexService;
|
||||
private final IndexDeleteService indexDeleteService;
|
||||
private final IndexInformationService indexInformationService;
|
||||
private final IndexDocumentConverterService indexDocumentConverterService;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = INDEXING_QUEUE)
|
||||
public void receiveIndexingRequest(String in) throws JsonProcessingException {
|
||||
@RabbitListener(id = INDEXING_LISTENER_ID)
|
||||
public void receiveIndexingRequest(Message message) {
|
||||
|
||||
var indexRequest = objectMapper.readValue(message.getBody(), IndexMessage.class);
|
||||
|
||||
// This prevents from endless retries oom errors.
|
||||
if (message.getMessageProperties().isRedelivered()) {
|
||||
throw new AmqpRejectAndDontRequeueException(String.format("Error during last processing of request with dossierId: %s and fileId: %s, do not retry.",
|
||||
indexRequest.getDossierId(),
|
||||
indexRequest.getFileId()));
|
||||
}
|
||||
|
||||
try {
|
||||
receiveIndexingRequest(indexRequest);
|
||||
} catch (Exception e) {
|
||||
log.warn("An exception occurred in processing the indexing request stage: ", e);
|
||||
message.getMessageProperties().getHeaders().put(X_ERROR_INFO_HEADER, e.getMessage());
|
||||
message.getMessageProperties().getHeaders().put(X_ERROR_INFO_TIMESTAMP_HEADER, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void receiveIndexingRequest(IndexMessage indexRequest) {
|
||||
|
||||
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
|
||||
log.info("Processing indexing request: {}", indexRequest);
|
||||
fileStatusProcessingUpdateClient.indexing(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
FileStatus fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
Text text = textStorageService.getText(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
documentIndexService.indexDocument( indexRequest.getDossierTemplateId(),indexRequest.getDossierId(), indexRequest.getFileId(), fileStatus.getFilename(), text);
|
||||
fileStatusProcessingUpdateClient.indexingSuccessful(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
log.info("Successfully indexed {}", indexRequest);
|
||||
|
||||
FileModel fileStatus;
|
||||
Dossier dossier;
|
||||
switch (indexRequest.getMessageType()) {
|
||||
case INSERT:
|
||||
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
|
||||
indexFile(dossier, fileStatus);
|
||||
break;
|
||||
|
||||
case UPDATE:
|
||||
fileStatus = fileStatusClient.getFileStatus(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
dossier = dossierClient.getDossierById(indexRequest.getDossierId(), true, true);
|
||||
|
||||
if(documentUpdateService.documentExists(indexRequest.getFileId())) {var indexUpdateDocument = indexDocumentConverterService.convertUpdateDocument(fileStatus.getAssignee(),
|
||||
dossier.getSoftDeletedTime() != null,
|
||||
dossier.getArchivedTime() != null,
|
||||
fileStatus.getWorkflowStatus().name(),
|
||||
fileStatus.getFileAttributes());
|
||||
|
||||
documentUpdateService.updateDocument(indexRequest.getFileId(), indexUpdateDocument);
|
||||
log.info("Successfully updated {}", indexRequest);
|
||||
} else {
|
||||
indexFile(dossier, fileStatus);
|
||||
}
|
||||
break;
|
||||
|
||||
case DROP:
|
||||
indexDeleteService.recreateIndex();
|
||||
addAllDocumentsToIndexQueue();
|
||||
indexInformationService.updateIndexInformation();
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new IllegalArgumentException("MessageType '" + indexRequest.getMessageType() + "' does not exist");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = INDEXING_DQL)
|
||||
public void receiveIndexingRequestDQL(String in) throws JsonProcessingException {
|
||||
@RabbitListener(queues = INDEXING_DLQ)
|
||||
public void receiveIndexingRequestDQL(Message in) throws IOException {
|
||||
|
||||
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
|
||||
String errorLog = "Failed to process indexing request:";
|
||||
log.info(errorLog + ": {}", indexRequest);
|
||||
String errorMessage = errorLog + in.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
|
||||
OffsetDateTime timestamp = in.getMessageProperties().getHeader(X_ERROR_INFO_TIMESTAMP_HEADER);
|
||||
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
|
||||
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(),
|
||||
indexRequest.getFileId(),
|
||||
new FileErrorInfo(errorMessage, INDEXING_DLQ, "search-service", timestamp));
|
||||
|
||||
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
|
||||
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
log.info("Failed to process indexing request: {}", indexRequest);
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = DELETE_FROM_INDEX_QUEUE)
|
||||
public void receiveDeleteDocumentRequest(String in) throws JsonProcessingException {
|
||||
@RabbitListener(id = DELETE_FROM_INDEX_LISTENER_ID)
|
||||
public void receiveDeleteDocumentRequest(Message in) throws IOException {
|
||||
|
||||
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
|
||||
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
|
||||
log.info("Processing delete document request: {}", indexRequest);
|
||||
documentDeleteService.deleteDocument(indexRequest.getFileId());
|
||||
log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
try {
|
||||
documentDeleteService.deleteDocument(indexRequest.getFileId());
|
||||
log.info("Successfully deleted document with dossierId {} and fileId {}", indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
} catch (Exception e) {
|
||||
log.warn("An exception occurred in processing delete document stage: {}", e.getMessage());
|
||||
in.getMessageProperties().getHeaders().put(X_ERROR_INFO_HEADER, e.getMessage());
|
||||
in.getMessageProperties().getHeaders().put(X_ERROR_INFO_TIMESTAMP_HEADER, OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS));
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = DELETE_FROM_INDEX_DLQ)
|
||||
public void receiveDeleteDocumentRequestDLQ(String in) throws JsonProcessingException {
|
||||
public void receiveDeleteDocumentRequestDLQ(Message in) throws IOException {
|
||||
|
||||
var indexRequest = objectMapper.readValue(in.getBody(), IndexMessage.class);
|
||||
String errorLog = "Failed to process delete from index request ";
|
||||
log.info(errorLog + ": {}", indexRequest);
|
||||
String errorMessage = errorLog + in.getMessageProperties().getHeader(X_ERROR_INFO_HEADER);
|
||||
OffsetDateTime timestamp = in.getMessageProperties().getHeader(X_ERROR_INFO_TIMESTAMP_HEADER);
|
||||
timestamp = timestamp != null ? timestamp : OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS);
|
||||
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(),
|
||||
indexRequest.getFileId(),
|
||||
new FileErrorInfo(errorMessage, INDEXING_DLQ, "search-service", timestamp));
|
||||
|
||||
var indexRequest = objectMapper.readValue(in, IndexMessage.class);
|
||||
fileStatusProcessingUpdateClient.indexingFailed(indexRequest.getDossierId(), indexRequest.getFileId());
|
||||
log.info("Failed to process delete from index request: {}", indexRequest);
|
||||
}
|
||||
|
||||
|
||||
private void indexFile(Dossier dossier, FileModel file) {
|
||||
|
||||
fileStatusProcessingUpdateClient.indexing(dossier.getId(), file.getId());
|
||||
Text text = textStorageService.getText(dossier.getId(), file.getId());
|
||||
|
||||
var indexDocument = indexDocumentConverterService.convert(dossier.getDossierTemplateId(),
|
||||
dossier.getId(),
|
||||
file.getId(),
|
||||
file.getFilename(),
|
||||
text,
|
||||
file.getAssignee(),
|
||||
dossier.getSoftDeletedTime() != null,
|
||||
dossier.getArchivedTime() != null,
|
||||
file.getWorkflowStatus(),
|
||||
file.getFileAttributes());
|
||||
|
||||
documentIndexService.indexDocument(indexDocument);
|
||||
fileStatusProcessingUpdateClient.indexingSuccessful(dossier.getId(), file.getId());
|
||||
log.info("Successfully indexed dossier {} file {}", dossier.getId(), file.getId());
|
||||
}
|
||||
|
||||
|
||||
private void addAllDocumentsToIndexQueue() {
|
||||
|
||||
var allDossiers = dossierClient.getAllDossiers(true, true);
|
||||
for (Dossier dossier : allDossiers) {
|
||||
addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getDossierStatus(dossier.getId()));
|
||||
addFilesToIndexingQueue(dossier.getId(), fileStatusClient.getSoftDeletedDossierStatus(dossier.getId()));
|
||||
}
|
||||
log.info("Successfully added all files from all dossiers to index queue (including archived and deleted)");
|
||||
}
|
||||
|
||||
|
||||
private void addFilesToIndexingQueue(String dossierId, List<FileModel> files) {
|
||||
|
||||
for (FileModel file : files) {
|
||||
log.info("Will add dossier {} file {} to index queue", dossierId, file.getId());
|
||||
rabbitTemplate.convertAndSend(INDEXING_REQUEST_EXCHANGE,
|
||||
TenantContext.getTenantId(),
|
||||
IndexMessage.builder().messageType(IndexMessageType.INSERT).dossierId(dossierId).fileId(file.getId()).build(),
|
||||
message -> {
|
||||
message.getMessageProperties().setPriority(99);
|
||||
return message;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -1,58 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import org.springframework.amqp.core.Queue;
|
||||
import org.springframework.amqp.core.QueueBuilder;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@Configuration
|
||||
@RequiredArgsConstructor
|
||||
public class MessagingConfiguration {
|
||||
|
||||
public static final String INDEXING_QUEUE = "indexingQueue";
|
||||
public static final String INDEXING_DQL = "indexingDQL";
|
||||
|
||||
public static final String DELETE_FROM_INDEX_QUEUE = "deleteFromIndexQueue";
|
||||
public static final String DELETE_FROM_INDEX_DLQ = "deleteFromIndexDLQ";
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue indexingQueue() {
|
||||
|
||||
return QueueBuilder.durable(INDEXING_QUEUE)
|
||||
.withArgument("x-dead-letter-exchange", "")
|
||||
.withArgument("x-dead-letter-routing-key", INDEXING_DQL)
|
||||
.maxPriority(2)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue indexingDeadLetterQueue() {
|
||||
|
||||
return QueueBuilder.durable(INDEXING_DQL).build();
|
||||
}
|
||||
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue deleteFromIndexQueue() {
|
||||
|
||||
return QueueBuilder.durable(DELETE_FROM_INDEX_QUEUE)
|
||||
.withArgument("x-dead-letter-exchange", "")
|
||||
.withArgument("x-dead-letter-routing-key", DELETE_FROM_INDEX_DLQ)
|
||||
.maxPriority(2)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
public Queue deleteFromIndexDLQ() {
|
||||
|
||||
return QueueBuilder.durable(DELETE_FROM_INDEX_DLQ).build();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@ -0,0 +1,74 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.configuration.MessagingConfiguration.*;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitHandler;
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitListener;
|
||||
import org.springframework.boot.context.event.ApplicationReadyEvent;
|
||||
import org.springframework.context.event.EventListener;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.knecon.fforesight.tenantcommons.TenantProvider;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantCreatedEvent;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantQueueConfiguration;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
import com.knecon.fforesight.tenantcommons.queue.RabbitQueueFromExchangeService;
|
||||
import com.knecon.fforesight.tenantcommons.queue.TenantExchangeMessageReceiver;
|
||||
|
||||
@Service
|
||||
public class TenantExchangeMessageReceiverImpl extends TenantExchangeMessageReceiver {
|
||||
|
||||
public TenantExchangeMessageReceiverImpl(RabbitQueueFromExchangeService rabbitQueueService, TenantProvider tenantProvider) {
|
||||
|
||||
super(rabbitQueueService, tenantProvider);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected Set<TenantQueueConfiguration> getTenantQueueConfigs() {
|
||||
|
||||
return Set.of(TenantQueueConfiguration.builder()
|
||||
.listenerId(IndexingMessageReceiver.INDEXING_LISTENER_ID)
|
||||
.exchangeName(INDEXING_REQUEST_EXCHANGE)
|
||||
.queuePrefix(INDEXING_REQUEST_QUEUE_PREFIX)
|
||||
.dlqName(INDEXING_DLQ)
|
||||
.arguments(Map.of("x-max-priority", 2))
|
||||
.build(),
|
||||
TenantQueueConfiguration.builder()
|
||||
.listenerId(IndexingMessageReceiver.DELETE_FROM_INDEX_LISTENER_ID)
|
||||
.exchangeName(DELETE_FROM_INDEX_REQUEST_EXCHANGE)
|
||||
.queuePrefix(DELETE_FROM_INDEX_REQUEST_QUEUE_PREFIX)
|
||||
.dlqName(DELETE_FROM_INDEX_DLQ)
|
||||
.arguments(Map.of("x-max-priority", 2))
|
||||
.build());
|
||||
}
|
||||
|
||||
|
||||
@EventListener(ApplicationReadyEvent.class)
|
||||
public void onApplicationReady() {
|
||||
|
||||
System.out.println("application ready invoked");
|
||||
super.initializeQueues();
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = "#{tenantMessagingConfigurationImpl.getTenantCreatedQueueName()}")
|
||||
public void reactToTenantCreation(TenantCreatedEvent tenantCreatedEvent) {
|
||||
|
||||
super.reactToTenantCreation(tenantCreatedEvent);
|
||||
}
|
||||
|
||||
|
||||
@RabbitHandler
|
||||
@RabbitListener(queues = "#{tenantMessagingConfigurationImpl.getTenantDeletedQueueName()}")
|
||||
public void reactToTenantDeletion(TenantResponse tenantResponse) {
|
||||
|
||||
super.reactToTenantDeletion(tenantResponse);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,48 @@
|
||||
package com.iqser.red.service.search.v1.server.queue;
|
||||
|
||||
import org.springframework.amqp.rabbit.annotation.RabbitListener;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class UpdatedTenantMessageReceiver {
|
||||
|
||||
private final IndexQueryService indexQueryService;
|
||||
private final IndexDeleteService indexDeleteService;
|
||||
|
||||
@Value("${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
|
||||
private String tenantUpdatedQueue;
|
||||
|
||||
|
||||
@PostConstruct
|
||||
public void postConstruct() {
|
||||
|
||||
log.info("Listener for tenant updated events started for queue: {}", this.tenantUpdatedQueue);
|
||||
}
|
||||
|
||||
|
||||
@RabbitListener(queues = "${fforesight.multitenancy.tenant-updated-queue:search-service-tenant-updated}")
|
||||
public void updateTenant(TenantResponse tenant) {
|
||||
|
||||
String numberOfReplicas = tenant.getSearchConnection().getNumberOfReplicas();
|
||||
String numberOfShards = tenant.getSearchConnection().getNumberOfShards();
|
||||
IndexQueryResult queryResult = indexQueryService.getIndexQueryResult(tenant.getSearchConnection());
|
||||
|
||||
if (queryResult.isIndexFound() && (!numberOfReplicas.equals(queryResult.getNumberOfReplicas()) || !numberOfShards.equals(queryResult.getNumberOfShards()))) {
|
||||
log.info("Number of shards or replicas were changed during tenant update, indices will be recreated");
|
||||
indexDeleteService.recreateIndex(tenant.getSearchConnection());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,38 +1,8 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
|
||||
public interface DocumentDeleteService {
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.elasticsearch.action.delete.DeleteRequest;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class DocumentDeleteService {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
public void deleteDocument(String fileId) {
|
||||
|
||||
DeleteRequest request = new DeleteRequest(INDEX_NAME).id(fileId).setRefreshPolicy(settings.getRefreshPolicy());
|
||||
|
||||
try {
|
||||
client.delete(request, RequestOptions.DEFAULT);
|
||||
} catch (IOException e) {
|
||||
throw new IndexException(String.format(IndexException.DOCUMENT_DELETE_ERROR, fileId), e);
|
||||
}
|
||||
}
|
||||
void deleteDocument(String fileId);
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
@ -1,85 +1,10 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.elasticsearch.action.index.IndexRequest;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexSection;
|
||||
import com.iqser.red.service.search.v1.server.model.SectionArea;
|
||||
import com.iqser.red.service.search.v1.server.model.SectionText;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class DocumentIndexService {
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
private final ElasticsearchSettings settings;
|
||||
private final ObjectMapper objectMapper;
|
||||
public interface DocumentIndexService {
|
||||
|
||||
|
||||
public void indexDocument(String dossierTemplateId, String dossierId, String fileId, String filename, Text text) {
|
||||
|
||||
IndexRequest indexRequest = new IndexRequest(INDEX_NAME).id(fileId);
|
||||
indexRequest.setRefreshPolicy(settings.getRefreshPolicy());
|
||||
indexRequest.source(toJson(convert(dossierTemplateId, dossierId, fileId, filename, text)), XContentType.JSON);
|
||||
|
||||
try {
|
||||
client.index(indexRequest, RequestOptions.DEFAULT);
|
||||
} catch (IOException e) {
|
||||
throw new IndexException(String.format(IndexException.DOCUMENT_INDEX_ERROR, fileId), e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public String toJson(IndexDocument indexDocument) {
|
||||
|
||||
try {
|
||||
return objectMapper.writeValueAsString(indexDocument);
|
||||
} catch (JsonProcessingException e) {
|
||||
throw new IndexException(String.format(IndexException.CONTENT_TO_JSON_ERROR, indexDocument.getFileId()), e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private IndexDocument convert(String dossierTemplateId, String dossierId, String fileId, String filename, Text text) {
|
||||
|
||||
return IndexDocument.builder()
|
||||
.dossierTemplateId(dossierTemplateId)
|
||||
.dossierId(dossierId)
|
||||
.fileId(fileId)
|
||||
.filename(filename)
|
||||
.sections(text.getSectionTexts().stream().map(this::convert).collect(Collectors.toList()))
|
||||
.date(OffsetDateTime.now())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private IndexSection convert(SectionText sectionText) {
|
||||
|
||||
return IndexSection.builder()
|
||||
.sectionNumber(sectionText.getSectionNumber())
|
||||
.text(sectionText.getText())
|
||||
.pages(sectionText.getSectionAreas().stream().map(SectionArea::getPage).collect(Collectors.toSet()))
|
||||
.headline(sectionText.getHeadline())
|
||||
.build();
|
||||
}
|
||||
void indexDocument(IndexDocument indexDocument);
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,10 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
|
||||
|
||||
public interface DocumentUpdateService {
|
||||
|
||||
void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate);
|
||||
boolean documentExists(String fileId);
|
||||
|
||||
}
|
||||
@ -1,74 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.exception.IndexException.INDEX_EXISTS_ERROR;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.client.indices.CreateIndexRequest;
|
||||
import org.elasticsearch.client.indices.CreateIndexResponse;
|
||||
import org.elasticsearch.client.indices.GetIndexRequest;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.ResourceLoader;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
public class IndexCreatorService {
|
||||
|
||||
public static final String INDEX_NAME = "redaction";
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
public IndexCreatorService(ElasticsearchClient client, ElasticsearchSettings settings) {
|
||||
|
||||
this.client = client;
|
||||
this.settings = settings;
|
||||
|
||||
if (!indexExists()) {
|
||||
createIndex();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void createIndex() {
|
||||
|
||||
String indexMapping = ResourceLoader.load("index/mapping.json");
|
||||
|
||||
Settings.Builder settingsBuilder = Settings.builder()
|
||||
.put("number_of_shards", settings.getNumberOfShards())
|
||||
.put("number_of_replicas", settings.getNumberOfReplicas())
|
||||
.put("index.mapping.nested_objects.limit", settings.getNumberOfNestedObjectLimit());
|
||||
|
||||
CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX_NAME).settings(settingsBuilder.build())
|
||||
.mapping(indexMapping, XContentType.JSON);
|
||||
|
||||
try {
|
||||
CreateIndexResponse response = client.indices().create(createIndexRequest, RequestOptions.DEFAULT);
|
||||
log.info("Successfully created index: {}", response.index());
|
||||
} catch (IOException e) {
|
||||
log.error("Failed to create index.", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private boolean indexExists() {
|
||||
|
||||
GetIndexRequest getIndexRequest = new GetIndexRequest(INDEX_NAME);
|
||||
try {
|
||||
return client.indices().exists(getIndexRequest, RequestOptions.DEFAULT);
|
||||
} catch (IOException e) {
|
||||
throw new IndexException(INDEX_EXISTS_ERROR, e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,19 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
public interface IndexDeleteService {
|
||||
|
||||
void recreateIndex();
|
||||
|
||||
void recreateIndex(SearchConnection searchConnection);
|
||||
|
||||
|
||||
void closeIndex();
|
||||
|
||||
|
||||
void dropIndex();
|
||||
|
||||
void dropIndex(SearchConnection searchConnection);
|
||||
|
||||
}
|
||||
@ -0,0 +1,84 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import java.time.OffsetDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexFileAttribute;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexSection;
|
||||
import com.iqser.red.service.search.v1.server.model.SectionArea;
|
||||
import com.iqser.red.service.search.v1.server.model.SectionText;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
|
||||
@Service
|
||||
public class IndexDocumentConverterService {
|
||||
|
||||
public IndexDocument convert(String dossierTemplateId,
|
||||
String dossierId,
|
||||
String fileId,
|
||||
String filename,
|
||||
Text text,
|
||||
String assignee,
|
||||
boolean deleted,
|
||||
boolean archived,
|
||||
WorkflowStatus workflowStatus,
|
||||
Map<String, String> fileAttributes) {
|
||||
|
||||
return IndexDocument.builder()
|
||||
.dossierTemplateId(dossierTemplateId)
|
||||
.dossierId(dossierId)
|
||||
.fileId(fileId)
|
||||
.filename(filename)
|
||||
.sections(text.getSectionTexts().stream().map(this::convert).collect(Collectors.toList()))
|
||||
.date(Date.from(OffsetDateTime.now().toInstant()))
|
||||
.fileAttributes(convertFileAttributes(fileAttributes))
|
||||
.assignee(assignee)
|
||||
.dossierDeleted(deleted)
|
||||
.dossierArchived(archived)
|
||||
.workflowStatus(workflowStatus.name())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
public IndexDocumentUpdate convertUpdateDocument(String assignee, boolean deleted, boolean archived, String workflowStatus, Map<String, String> fileAttributes) {
|
||||
return IndexDocumentUpdate.builder()
|
||||
.assignee(assignee)
|
||||
.dossierDeleted(deleted)
|
||||
.dossierArchived(archived)
|
||||
.workflowStatus(workflowStatus)
|
||||
.fileAttributes(convertFileAttributes(fileAttributes))
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private List<IndexFileAttribute> convertFileAttributes(Map<String, String> fileAttributes) {
|
||||
|
||||
List<IndexFileAttribute> converted = new ArrayList<>();
|
||||
if (fileAttributes != null && !fileAttributes.isEmpty()) {
|
||||
fileAttributes.forEach((key, value) -> converted.add(new IndexFileAttribute(key, value)));
|
||||
}
|
||||
return converted;
|
||||
}
|
||||
|
||||
private IndexSection convert(SectionText sectionText) {
|
||||
|
||||
return IndexSection.builder()
|
||||
.sectionNumber(sectionText.getSectionNumber())
|
||||
.text(sectionText.getText())
|
||||
.pages(sectionText.getSectionAreas().stream().map(SectionArea::getPage).collect(Collectors.toSet()))
|
||||
.headline(sectionText.getHeadline())
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
||||
@ -0,0 +1,80 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import java.io.BufferedInputStream;
|
||||
import java.security.MessageDigest;
|
||||
import java.time.OffsetDateTime;
|
||||
import java.time.temporal.ChronoUnit;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.commons.codec.binary.StringUtils;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.index.IndexInformation;
|
||||
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class IndexInformationService {
|
||||
|
||||
private static final String PATH_TO_CONFIG = "index/mapping.json";
|
||||
|
||||
private final IndexInformationClient indexInformationClient;
|
||||
|
||||
|
||||
public boolean hasIndexChanged() {
|
||||
|
||||
try {
|
||||
IndexInformation indexInformationFromDatabase = indexInformationClient.getIndexInformation();
|
||||
if (indexInformationFromDatabase == null) {
|
||||
return true;
|
||||
}
|
||||
String fileHash = generateIndexConfigurationHash();
|
||||
log.info("Hash from database {} (updated {}) and hash from file {}",
|
||||
indexInformationFromDatabase.getIndexConfigurationHash(),
|
||||
indexInformationFromDatabase.getUpdateDate(),
|
||||
fileHash);
|
||||
|
||||
if (StringUtils.equals(indexInformationFromDatabase.getIndexConfigurationHash(), fileHash)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Exception while comparing index hashes", e);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
public void updateIndexInformation() {
|
||||
|
||||
IndexInformation indexInformation = IndexInformation.builder()
|
||||
.indexConfigurationHash(generateIndexConfigurationHash())
|
||||
.updateDate(OffsetDateTime.now().truncatedTo(ChronoUnit.MILLIS))
|
||||
.build();
|
||||
|
||||
indexInformationClient.updateIndexInformation(indexInformation);
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public String generateIndexConfigurationHash() {
|
||||
|
||||
byte[] buffer = new byte[8192];
|
||||
int count;
|
||||
MessageDigest digest = MessageDigest.getInstance("SHA-256");
|
||||
try (BufferedInputStream bis = new BufferedInputStream(new ClassPathResource(PATH_TO_CONFIG).getInputStream())) {
|
||||
while ((count = bis.read(buffer)) > 0) {
|
||||
digest.update(buffer, 0, count);
|
||||
}
|
||||
|
||||
return Arrays.toString(digest.digest());
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,17 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import lombok.AccessLevel;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.experimental.FieldDefaults;
|
||||
|
||||
@Data
@Builder
@FieldDefaults(makeFinal = true, level = AccessLevel.PRIVATE)
public class IndexQueryResult {

    // Whether the queried search index exists on the backend.
    boolean indexFound;
    // Shard/replica counts kept as strings — presumably as returned unparsed by
    // the index settings API; TODO confirm against the IndexQueryService impls.
    String numberOfShards;
    String numberOfReplicas;

}
|
||||
@ -0,0 +1,9 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
public interface IndexQueryService {

    /**
     * Returns the index status (existence plus shard/replica settings) for the
     * index reachable via the given search connection.
     */
    IndexQueryResult getIndexQueryResult(SearchConnection searchConnection);

}
|
||||
@ -1,200 +1,23 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static com.iqser.red.service.search.v1.server.exception.IndexException.FAILED_TO_SEARCH;
|
||||
import static com.iqser.red.service.search.v1.server.service.IndexCreatorService.INDEX_NAME;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.apache.lucene.search.join.ScoreMode;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.client.RequestOptions;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.index.query.BoolQueryBuilder;
|
||||
import org.elasticsearch.index.query.InnerHitBuilder;
|
||||
import org.elasticsearch.index.query.NestedQueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.SearchHits;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.model.MatchedDocument;
|
||||
import com.iqser.red.service.search.v1.model.MatchedSection;
|
||||
import com.iqser.red.service.search.v1.model.SearchResult;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.Query;
|
||||
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
public interface SearchService {
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
public class SearchService {
|
||||
SearchResult search(String queryString,
|
||||
List<String> dossierTemplateIds,
|
||||
List<String> dossierIds,
|
||||
String fileId,
|
||||
String assignee,
|
||||
boolean includeDeletedDossiers,
|
||||
boolean includeArchivedDossiers,
|
||||
String workflowStatus,
|
||||
Map<String, String> fileAttributes,
|
||||
int page,
|
||||
int pageSize,
|
||||
boolean returnSections);
|
||||
|
||||
private final ElasticsearchClient client;
|
||||
|
||||
|
||||
public SearchResult search(String queryString, List<String> dossierTemplateIds, List<String> dossierIds, String fileId, int page, int pageSize,
|
||||
boolean returnSections) {
|
||||
|
||||
Query query = QueryStringConverter.convert(queryString);
|
||||
|
||||
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(convertQuery(query, dossierTemplateIds,dossierIds , fileId, returnSections))
|
||||
.from((page - 1) * pageSize)
|
||||
.size(pageSize)
|
||||
.fetchSource(new String[]{"dossierId", "fileId"}, new String[]{"sections"})
|
||||
.highlighter(new HighlightBuilder().field("sections.text").field("filename").highlighterType("fvh"))
|
||||
.trackScores(true);
|
||||
|
||||
SearchRequest request = new SearchRequest(INDEX_NAME).source(searchSourceBuilder);
|
||||
|
||||
SearchResponse response = execute(request);
|
||||
|
||||
return convert(response, query);
|
||||
}
|
||||
|
||||
|
||||
protected SearchResponse execute(SearchRequest searchRequest) {
|
||||
|
||||
try {
|
||||
return client.search(searchRequest, RequestOptions.DEFAULT);
|
||||
} catch (IOException e) {
|
||||
throw new IndexException(FAILED_TO_SEARCH, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
 * Translates the parsed {@link Query} into an Elasticsearch bool query.
 * "Must" terms are required to match either section text or filename; "should"
 * terms only boost scoring. Dossier-template, dossier and file filters are
 * applied as non-scoring filter clauses.
 */
private QueryBuilder convertQuery(Query query, List<String> dossierTemplateIds, List<String> dossierIds, String fileId, boolean returnSections) {

    BoolQueryBuilder entireQuery = QueryBuilders.boolQuery();
    // Collects only the section-text clauses; reused for the nested inner-hit query below.
    BoolQueryBuilder sectionsQueries = QueryBuilders.boolQuery();

    // Each "must" term has to occur in the section text OR the filename.
    for (String must : query.getMusts()) {

        QueryBuilder textPhraseQuery = QueryBuilders.matchPhraseQuery("sections.text", must.toLowerCase(Locale.ROOT))
                .queryName(must);
        // queryName prefixed "filename." so matched-query names can be mapped back to terms later.
        QueryBuilder filenamePhraseQuery = QueryBuilders.matchPhraseQuery("filename", must.toLowerCase(Locale.ROOT))
                .queryName("filename." + must);
        QueryBuilder filenameOrTextMustQuery = QueryBuilders.boolQuery()
                .should(textPhraseQuery)
                .should(filenamePhraseQuery);
        entireQuery.must(filenameOrTextMustQuery);
        sectionsQueries.should(textPhraseQuery);
    }
    // "Should" terms are optional: they raise the score but do not restrict the result set.
    for (String should : query.getShoulds()) {
        QueryBuilder textTermQuery = QueryBuilders.termQuery("sections.text", should.toLowerCase(Locale.ROOT))
                .queryName(should);
        QueryBuilder filenameTermQuery = QueryBuilders.termQuery("filename", should.toLowerCase(Locale.ROOT))
                .queryName("filename." + should);
        entireQuery.should(textTermQuery);
        entireQuery.should(filenameTermQuery);
        sectionsQueries.should(textTermQuery);
    }

    if (returnSections) {
        // Nested query yields up to 100 inner hits per document; the section source
        // returns headline/number/pages but excludes the (potentially large) text.
        NestedQueryBuilder nestedQuery = QueryBuilders.nestedQuery("sections", sectionsQueries, ScoreMode.Avg)
                .innerHit(new InnerHitBuilder().setSize(100)
                        .setFetchSourceContext(new FetchSourceContext(true, new String[]{"sections.headline", "sections.sectionNumber", "sections.pages"}, new String[]{"sections.text"})));

        entireQuery.should(nestedQuery);
    }

    if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {

        // OR-combine the template ids, then apply as a non-scoring filter.
        var dossierTemplateIdQueryBuilder = QueryBuilders.boolQuery();

        for (var dossierTemplateId : dossierTemplateIds) {
            dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.matchQuery("dossierTemplateId", dossierTemplateId));
        }

        entireQuery.filter(dossierTemplateIdQueryBuilder);
    }

    if (dossierIds != null && !dossierIds.isEmpty()) {

        // Same pattern for dossier ids.
        var dossierIdQueryBuilder = QueryBuilders.boolQuery();

        for (var dossierId : dossierIds) {
            dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.matchQuery("dossierId", dossierId));
        }

        entireQuery.filter(dossierIdQueryBuilder);
    }

    // Optional restriction to a single file.
    if (fileId != null) {
        entireQuery.filter(QueryBuilders.matchQuery("fileId", fileId));
    }

    return entireQuery;
}
|
||||
|
||||
|
||||
private SearchResult convert(SearchResponse response, Query query) {
|
||||
|
||||
return SearchResult.builder()
|
||||
.matchedDocuments(Arrays.stream(response.getHits().getHits())
|
||||
.map(hit -> convertSearchHit(hit, query))
|
||||
.collect(Collectors.toList()))
|
||||
.maxScore(response.getHits().getMaxScore())
|
||||
.total(response.getHits().getTotalHits().value)
|
||||
.build();
|
||||
}
|
||||
|
||||
|
||||
private MatchedDocument convertSearchHit(SearchHit hit, Query query) {
|
||||
|
||||
Set<String> matchesTerms = Arrays.stream(hit.getMatchedQueries()).map(match -> match.contains("filename.") ? match.replace("filename.", "") : match).collect(Collectors.toSet());
|
||||
Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream())
|
||||
.filter(term -> !matchesTerms.contains(term))
|
||||
.collect(Collectors.toSet());
|
||||
|
||||
MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
|
||||
.score(hit.getScore())
|
||||
.dossierId((String) hit.getSourceAsMap().get("dossierId"))
|
||||
.dossierTemplateId((String) hit.getSourceAsMap().get("dossierTemplateId"))
|
||||
.fileId((String) hit.getSourceAsMap().get("fileId"))
|
||||
.highlights(hit.getHighlightFields()
|
||||
.entrySet()
|
||||
.stream()
|
||||
.collect(Collectors.toMap(e -> e.getKey(), e -> Arrays.stream(e.getValue().getFragments())
|
||||
.map(Text::string)
|
||||
.collect(Collectors.toSet()))))
|
||||
.matchedTerms(matchesTerms)
|
||||
.unmatchedTerms(unmatchedTerms);
|
||||
|
||||
if (hit.getInnerHits() != null) {
|
||||
SearchHits sectionHits = hit.getInnerHits().get("sections");
|
||||
|
||||
matchedDocumentBuilder.matchedSections(Arrays.stream(sectionHits.getHits())
|
||||
.map(innerHit -> convertInnerHit(innerHit))
|
||||
.collect(Collectors.toList()))
|
||||
.containsAllMatchedSections(sectionHits.getTotalHits().value == sectionHits.getHits().length ? true : false);
|
||||
}
|
||||
|
||||
return matchedDocumentBuilder.build();
|
||||
|
||||
}
|
||||
|
||||
|
||||
private MatchedSection convertInnerHit(SearchHit hit) {
|
||||
|
||||
return MatchedSection.builder()
|
||||
.headline((String) hit.getSourceAsMap().get("headline"))
|
||||
.sectionNumber((Integer) hit.getSourceAsMap().get("sectionNumber"))
|
||||
.pages(new HashSet<>((ArrayList<Integer>) hit.getSourceAsMap().get("pages")))
|
||||
.matchedTerms(Arrays.stream(hit.getMatchedQueries()).collect(Collectors.toSet()))
|
||||
.build();
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@ -1,16 +1,14 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.springframework.core.io.InputStreamResource;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.file.management.v1.api.model.FileType;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
|
||||
import com.iqser.red.storage.commons.service.StorageService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@ -20,21 +18,16 @@ import lombok.extern.slf4j.Slf4j;
|
||||
public class TextStorageService {
|
||||
|
||||
private final StorageService storageService;
|
||||
private final ObjectMapper objectMapper;
|
||||
|
||||
|
||||
@Timed("redactmanager_getTextSearchService")
|
||||
public Text getText(String dossierId, String fileId) {
|
||||
|
||||
InputStreamResource inputStreamResource;
|
||||
try {
|
||||
inputStreamResource = storageService.getObject(StorageIdUtils.getStorageId(dossierId, fileId, FileType.TEXT));
|
||||
return storageService.readJSONObject(TenantContext.getTenantId(), StorageIdUtils.getStorageId(dossierId, fileId, FileType.SIMPLIFIED_TEXT), Text.class);
|
||||
} catch (StorageObjectDoesNotExist e) {
|
||||
throw new RuntimeException("Text is not available", e);
|
||||
}
|
||||
|
||||
try {
|
||||
return objectMapper.readValue(inputStreamResource.getInputStream(), Text.class);
|
||||
} catch (IOException e) {
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Could not convert Text", e);
|
||||
}
|
||||
}
|
||||
|
||||
@ -0,0 +1,43 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch._types.Refresh;
|
||||
import co.elastic.clients.elasticsearch.core.DeleteRequest;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
|
||||
public class DocumentDeleteServiceImpl implements DocumentDeleteService {
|
||||
|
||||
private final EsClientCache clientCache;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
public void deleteDocument(String fileId) {
|
||||
|
||||
DeleteRequest request = new DeleteRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(fileId)
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
|
||||
.build();
|
||||
|
||||
try {
|
||||
clientCache.getClient().delete(request);
|
||||
} catch (IOException | ElasticsearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentDeleteError(fileId, e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,45 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch._types.Refresh;
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
|
||||
public class DocumentIndexServiceImpl implements DocumentIndexService {
|
||||
|
||||
private final EsClientCache clientCache;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
@Timed("redactmanager_indexDocument")
|
||||
public void indexDocument(IndexDocument indexDocument) {
|
||||
|
||||
try {
|
||||
clientCache.getClient()
|
||||
.index(i -> i.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(indexDocument.getFileId())
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
|
||||
.document(indexDocument));
|
||||
} catch (IOException | ElasticsearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentIndexError(indexDocument.getFileId(), e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,53 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch._types.Refresh;
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
|
||||
public class DocumentUpdateServiceImpl implements DocumentUpdateService {
|
||||
|
||||
private final EsClientCache clientCache;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@Timed("redactmanager_updateDocument")
|
||||
public void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate) {
|
||||
|
||||
try {
|
||||
clientCache.getClient()
|
||||
.update(u -> u.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(fileId)
|
||||
.doc(indexDocumentUpdate)
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())), IndexDocumentUpdate.class);
|
||||
} catch (IOException | ElasticsearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentUpdateError(fileId, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@Timed("redactmanager_documentExists")
|
||||
public boolean documentExists(String fileId) {
|
||||
|
||||
return clientCache.getClient().exists(e -> e.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix())).id(fileId)).value();
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,66 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import lombok.SneakyThrows;
|
||||
import org.apache.http.HttpHost;
|
||||
import org.apache.http.auth.AuthScope;
|
||||
import org.apache.http.auth.UsernamePasswordCredentials;
|
||||
import org.apache.http.client.CredentialsProvider;
|
||||
import org.apache.http.impl.client.BasicCredentialsProvider;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
import org.elasticsearch.client.RestClientBuilder;
|
||||
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import co.elastic.clients.elasticsearch.ElasticsearchClient;
|
||||
import co.elastic.clients.json.jackson.JacksonJsonpMapper;
|
||||
import co.elastic.clients.transport.ElasticsearchTransport;
|
||||
import co.elastic.clients.transport.rest_client.RestClientTransport;
|
||||
import lombok.Data;
|
||||
import lombok.experimental.Delegate;
|
||||
|
||||
@Data
|
||||
@SuppressWarnings("PMD")
|
||||
public class EsClient {
|
||||
|
||||
// Lower timeouts should be set per request.
|
||||
private static final int ABSURD_HIGH_TIMEOUT = 600_000;
|
||||
|
||||
private SearchConnection searchConnection;
|
||||
|
||||
@Delegate
|
||||
private ElasticsearchClient elasticsearchClient;
|
||||
|
||||
|
||||
public EsClient(SearchConnection searchConnection) {
|
||||
|
||||
HttpHost[] httpHost = searchConnection.getHosts()
|
||||
.stream()
|
||||
.map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
|
||||
.toList()
|
||||
.toArray(new HttpHost[searchConnection.getHosts().size()]);
|
||||
|
||||
var builder = RestClient.builder(httpHost)
|
||||
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(ABSURD_HIGH_TIMEOUT)
|
||||
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));
|
||||
|
||||
if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
|
||||
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
|
||||
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(searchConnection.getUsername(), searchConnection.getPassword()));
|
||||
builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
|
||||
}
|
||||
|
||||
ElasticsearchTransport transport = new RestClientTransport(builder.build(), new JacksonJsonpMapper());
|
||||
|
||||
this.searchConnection = searchConnection;
|
||||
this.elasticsearchClient = new ElasticsearchClient(transport);
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public void terminate() {
|
||||
|
||||
elasticsearchClient._transport().close();
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,102 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.cache.CacheLoader;
|
||||
import com.google.common.cache.LoadingCache;
|
||||
import com.google.common.cache.RemovalListener;
|
||||
import com.knecon.fforesight.tenantcommons.EncryptionDecryptionService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.TenantsClient;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class EsClientCache {

    private final TenantsClient tenantsClient;
    private final EncryptionDecryptionService encryptionDecryptionService;
    private final IndexCreatorServiceImpl indexCreatorService;

    // Maximum number of tenant clients kept in the cache.
    @Value("${multitenancy.client-cache.maximumSize:100}")
    private Long maximumSize;

    // Idle minutes after which a cached client is evicted (and closed by the removal listener).
    @Value("${multitenancy.client-cache.expireAfterAccess:10}")
    private Integer expireAfterAccess;

    // Per-tenant client cache, built in createCache() after property injection.
    private LoadingCache<String, EsClient> clients;

    /**
     * Health-checks the current tenant's cached client and tries to terminate it
     * when it no longer responds.
     * NOTE(review): the cache entry is invalidated only when terminate() itself
     * fails — confirm whether a dead client should be invalidated
     * unconditionally so the next access builds a fresh one.
     */
    @SneakyThrows
    public void isClientAliveOrTerminate() {

        try {
            var client = clients.get(TenantContext.getTenantId());
            try {

                // info() performs a round-trip to the cluster; an exception means the client is dead.
                log.info("Checking if client is still alive: {}", client.info());
            } catch (Exception e) {

                try {
                    client.terminate();
                } catch (Exception e2) {

                    log.info("Failed to terminate ES Client");
                    clients.invalidate(TenantContext.getTenantId());
                }
            }
        }catch (Exception e){
            log.error("Failed to terminate/invalide client", e);
        }
    }

    /**
     * Builds the per-tenant client cache. Evicted clients are closed via the
     * removal listener; a cache miss loads the tenant's search connection
     * (decrypting the password when present), builds a client, and ensures the
     * index exists before returning it.
     */
    @PostConstruct
    protected void createCache() {

        clients = CacheBuilder.newBuilder()
                .maximumSize(maximumSize)
                .expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
                .removalListener((RemovalListener<String, EsClient>) removal -> {
                    try {
                        removal.getValue().terminate();
                        log.info("Closed elasticsearch client for tenant {}", removal.getKey());
                    } catch (Exception e) {
                        log.info("Failed to close elasticsearch client for tenant {}", removal.getKey());
                    }
                })
                .build(new CacheLoader<>() {
                    public EsClient load(String tenantId) {

                        var tenant = tenantsClient.getTenant(tenantId);

                        // Passwords are stored encrypted; decrypt before handing to the client.
                        if (tenant.getSearchConnection().getPassword() != null) {
                            tenant.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(tenant.getSearchConnection().getPassword()));
                        }
                        var client = new EsClient(tenant.getSearchConnection());
                        log.info("Initialized elasticsearch client for tenant {}", tenantId);
                        indexCreatorService.createIndex(client);
                        return client;
                    }
                });
    }

    /** Returns the cached client for the current tenant, creating it on first access. */
    @SneakyThrows
    public EsClient getClient() {

        return clients.get(TenantContext.getTenantId());
    }

}
|
||||
@ -0,0 +1,84 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.core.io.ResourceLoader;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.mapping.TypeMapping;
|
||||
import co.elastic.clients.elasticsearch.indices.IndexSettings;
|
||||
import co.elastic.clients.elasticsearch.indices.MappingLimitSettingsNestedObjects;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
|
||||
@SuppressWarnings("PMD")
|
||||
public class IndexCreatorServiceImpl {
|
||||
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
public void createIndex(EsClient esClient) {
|
||||
|
||||
if (!indexExists(esClient)) {
|
||||
try {
|
||||
var response = esClient.indices()
|
||||
.create(i -> i.index(IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix()))
|
||||
.settings(createIndexSettings(esClient))
|
||||
.mappings(createIndexMapping()));
|
||||
log.info("Successfully created index: {}", response.index());
|
||||
} catch (IOException e) {
|
||||
log.error("Failed to create index.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private boolean indexExists(EsClient esClient) {
|
||||
|
||||
try {
|
||||
var response = esClient.indices().exists(i -> i.index(IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix())));
|
||||
return response.value();
|
||||
} catch (IOException e) {
|
||||
throw IndexException.indexExists(e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private TypeMapping createIndexMapping() {
|
||||
|
||||
URL resource = ResourceLoader.class.getClassLoader().getResource("index/mapping.json");
|
||||
|
||||
try (InputStream is = resource.openStream()) {
|
||||
return new TypeMapping.Builder().withJson(is).build();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private IndexSettings createIndexSettings(EsClient esClient) {
|
||||
|
||||
URL resource = ResourceLoader.class.getClassLoader().getResource("index/settings.json");
|
||||
|
||||
try (InputStream is = resource.openStream()) {
|
||||
return new IndexSettings.Builder().withJson(is)
|
||||
.numberOfShards(esClient.getSearchConnection().getNumberOfShards())
|
||||
.numberOfReplicas(esClient.getSearchConnection().getNumberOfReplicas())
|
||||
.mapping(m -> m.nestedObjects(MappingLimitSettingsNestedObjects.of(a -> a.limit(settings.getNumberOfNestedObjectLimit()))))
|
||||
.build();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,94 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
|
||||
public class IndexDeleteServiceImpl implements IndexDeleteService {
|
||||
|
||||
private final EsClientCache clientCache;
|
||||
private final IndexCreatorServiceImpl indexCreatorService;
|
||||
|
||||
|
||||
public void recreateIndex() {
|
||||
|
||||
closeIndex();
|
||||
dropIndex();
|
||||
indexCreatorService.createIndex(clientCache.getClient());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void recreateIndex(SearchConnection searchConnection) {
|
||||
|
||||
var client = new EsClient(searchConnection);
|
||||
closeIndex(client, searchConnection.getIndexPrefix());
|
||||
dropIndex(client, searchConnection.getIndexPrefix());
|
||||
indexCreatorService.createIndex(client);
|
||||
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void closeIndex() {
|
||||
|
||||
closeIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
|
||||
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void dropIndex() {
|
||||
|
||||
dropIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
|
||||
}
|
||||
|
||||
|
||||
public void dropIndex(SearchConnection searchConnection) {
|
||||
|
||||
var client = new EsClient(searchConnection);
|
||||
closeIndex(client, searchConnection.getIndexPrefix());
|
||||
dropIndex(client, searchConnection.getIndexPrefix());
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private void closeIndex(EsClient client, String indexPrefix) {
|
||||
|
||||
var closeIndexResponse = client.indices()
|
||||
.close(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
|
||||
if (closeIndexResponse.acknowledged()) {
|
||||
log.info("Index is closed");
|
||||
} else {
|
||||
throw new IndexException("Error while closing index");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private void dropIndex(EsClient client, String indexPrefix) {
|
||||
|
||||
log.info("Will drop index");
|
||||
var deleteIndexResponse = client.indices()
|
||||
.delete(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
|
||||
|
||||
if (deleteIndexResponse.acknowledged()) {
|
||||
log.info("Index is dropped");
|
||||
} else {
|
||||
throw new IndexException("Error while dropping index");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,70 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.ElasticsearchException;
|
||||
import co.elastic.clients.elasticsearch.indices.GetIndicesSettingsResponse;
|
||||
import co.elastic.clients.elasticsearch.indices.IndexState;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
|
||||
@SuppressWarnings("PMD")
|
||||
public class IndexQueryServiceImpl implements IndexQueryService {
|
||||
|
||||
@SneakyThrows
|
||||
public IndexQueryResult getIndexQueryResult(SearchConnection searchConnection) {
|
||||
|
||||
IndexQueryResult.IndexQueryResultBuilder builder = IndexQueryResult.builder();
|
||||
|
||||
getIndexState(searchConnection).ifPresent(indexState -> {
|
||||
builder.indexFound(true);
|
||||
|
||||
var indexSettings = indexState.settings();
|
||||
if (indexSettings != null) {
|
||||
|
||||
String replicas = indexSettings.numberOfReplicas();
|
||||
String shards = indexSettings.numberOfShards();
|
||||
|
||||
if (indexSettings.index() != null) {
|
||||
|
||||
if (replicas == null) {
|
||||
replicas = indexSettings.index().numberOfReplicas();
|
||||
}
|
||||
if (shards == null) {
|
||||
shards = indexSettings.index().numberOfShards();
|
||||
}
|
||||
}
|
||||
builder.numberOfReplicas(replicas).numberOfShards(shards);
|
||||
}
|
||||
});
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private Optional<IndexState> getIndexState(SearchConnection searchConnection) {
|
||||
|
||||
var esClient = new EsClient(searchConnection);
|
||||
var indexName = IndexNameHelper.getSearchIndex(esClient.getSearchConnection().getIndexPrefix());
|
||||
try {
|
||||
GetIndicesSettingsResponse settings = esClient.indices().getSettings(i -> i.index(indexName));
|
||||
return Optional.ofNullable(settings.get(indexName));
|
||||
} catch (ElasticsearchException elasticsearchException) {
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,337 @@
|
||||
package com.iqser.red.service.search.v1.server.service.elasticsearch;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.model.MatchedDocument;
|
||||
import com.iqser.red.service.search.v1.model.MatchedSection;
|
||||
import com.iqser.red.service.search.v1.model.SearchResult;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.Query;
|
||||
import com.iqser.red.service.search.v1.server.service.SearchService;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
|
||||
|
||||
import co.elastic.clients.elasticsearch._types.FieldValue;
|
||||
import co.elastic.clients.elasticsearch._types.query_dsl.ChildScoreMode;
|
||||
import co.elastic.clients.elasticsearch._types.query_dsl.QueryBuilders;
|
||||
import co.elastic.clients.elasticsearch.core.SearchRequest;
|
||||
import co.elastic.clients.elasticsearch.core.SearchResponse;
|
||||
import co.elastic.clients.elasticsearch.core.search.HighlightField;
|
||||
import co.elastic.clients.elasticsearch.core.search.HighlighterType;
|
||||
import co.elastic.clients.elasticsearch.core.search.Hit;
|
||||
import co.elastic.clients.elasticsearch.core.search.InnerHitsResult;
|
||||
import co.elastic.clients.json.JsonData;
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import io.micrometer.core.instrument.util.StringUtils;
|
||||
import jakarta.json.JsonObject;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
/**
 * Elasticsearch-backed implementation of {@link SearchService}.
 *
 * <p>Builds one boolean query from a parsed query string plus structured
 * filters (dossier/file/assignee/workflow/attributes), executes it against the
 * search index and converts hits back into {@link SearchResult} DTOs. Matched
 * terms are tracked through per-clause query names ({@code queryName}), with
 * "filename." / "fileAttributes." prefixes distinguishing field matches.
 */
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "elasticsearch")
public class SearchServiceImpl implements SearchService {

    private final EsClientCache clientCache;


    /**
     * Executes a paged search.
     *
     * @param queryString              raw user query, parsed via {@link QueryStringConverter}
     * @param dossierTemplateIds       optional filter; empty/null means no restriction
     * @param dossierIds               optional filter; empty/null means no restriction
     * @param fileId                   optional exact file filter
     * @param assignee                 optional assignee filter
     * @param includeDeletedDossiers   when false, deleted dossiers are filtered out
     * @param includeArchivedDossiers  when false, archived dossiers are filtered out
     * @param workflowStatus           optional workflow-status filter
     * @param fileAttributes           optional name/value attribute filters (AND-ed)
     * @param page                     zero-based page; negative values fall back to 0
     * @param pageSize                 page size; non-positive values fall back to 10
     * @param returnSections           when true, matching sections are returned as inner hits
     */
    @Timed("redactmanager_search")
    public SearchResult search(String queryString,
            List<String> dossierTemplateIds,
            List<String> dossierIds,
            String fileId,
            String assignee,
            boolean includeDeletedDossiers,
            boolean includeArchivedDossiers,
            String workflowStatus,
            Map<String, String> fileAttributes,
            int page,
            int pageSize,
            boolean returnSections) {

        Query query = QueryStringConverter.convert(queryString);

        // Highlight the three fields that full-text clauses target.
        Map<String, HighlightField> highlightFieldMap = new HashMap<>();
        highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
        highlightFieldMap.put("filename", new HighlightField.Builder().build());
        highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());

        SearchRequest request = new SearchRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
                .query(convertQuery(query,
                        dossierTemplateIds,
                        dossierIds,
                        fileId,
                        assignee,
                        includeDeletedDossiers,
                        includeArchivedDossiers,
                        workflowStatus,
                        fileAttributes,
                        returnSections))
                // Paging: offset = page * pageSize, both normalized to safe defaults.
                .from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
                .size(getPageSizeOrDefault(pageSize))
                // Only fetch the document fields needed to build MatchedDocument.
                .source(s -> s.filter(f -> f.includes("dossierId",
                        "dossierTemplateId",
                        "dossierDeleted",
                        "dossierArchived",
                        "filename",
                        "fileId",
                        "assignee",
                        "dossierStatus",
                        "workflowStatus",
                        "fileAttributes")))
                .highlight(h -> h.type(HighlighterType.FastVector).fields(highlightFieldMap))
                .trackScores(true)
                .build();

        SearchResponse response = execute(request);

        return convert(response, query);
    }


    /**
     * Runs the request against the cached client; on I/O failure the client
     * liveness check runs (may terminate the cache entry) before rethrowing
     * as {@link IndexException}.
     */
    protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {

        try {
            return clientCache.getClient().search(searchRequest, IndexDocument.class);
        } catch (IOException e) {
            clientCache.isClientAliveOrTerminate();
            throw IndexException.searchFailed(e);
        }
    }


    /**
     * Translates the parsed query plus structured filters into one
     * Elasticsearch boolean query: a scoring part ("entireQuery") wrapped with
     * a non-scoring filter part ("filterQuery").
     *
     * <p>Each text clause carries a {@code queryName} so the hit's
     * {@code matchedQueries} can later be mapped back to user terms.
     */
    private co.elastic.clients.elasticsearch._types.query_dsl.Query convertQuery(Query query,
            List<String> dossierTemplateIds,
            List<String> dossierIds,
            String fileId,
            String assignee,
            boolean includeDeletedDossiers,
            boolean includeArchivedDossiers,
            String workflowStatus,
            Map<String, String> fileAttributes,
            boolean returnSections) {

        var entireQuery = QueryBuilders.bool();
        // Collects only the section-text clauses, reused for the nested inner-hits query.
        var sectionsQueries = QueryBuilders.bool();

        // MUST terms: each term must match in at least one of text/filename/attributes.
        for (String must : query.getMusts()) {

            var textPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must));
            var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must));
            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
                    .query(must.toLowerCase(Locale.ROOT))
                    .queryName("fileAttributes." + must));

            var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
            entireQuery.must(filenameOrTextMustQuery);
            sectionsQueries.should(textPhraseQuery);
        }
        // SHOULD terms: optional matches that only boost scoring.
        for (String should : query.getShoulds()) {

            var textTermQuery = QueryBuilders.matchPhrase(q -> q.field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should));
            var filenameTermQuery = QueryBuilders.matchPhrasePrefix(q -> q.field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should));
            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase(q -> q.field("fileAttributes.value")
                    .query(should.toLowerCase(Locale.ROOT))
                    .queryName("fileAttributes." + should));
            entireQuery.should(textTermQuery);
            entireQuery.should(filenameTermQuery);
            entireQuery.should(fileAttributesPhraseQuery);
            sectionsQueries.should(textTermQuery);
        }

        // When sections are requested, add a nested query over "sections" with
        // inner hits (capped at 100) so matching sections come back per document.
        if (returnSections) {
            var nestedQuery = QueryBuilders.nested(n -> n.scoreMode(ChildScoreMode.Avg)
                    .queryName("sections")
                    .query(sectionsQueries.build()._toQuery())
                    .path("sections")
                    .innerHits(i -> i.size(100)));
            entireQuery.should(nestedQuery);
        }

        var filterQuery = QueryBuilders.bool();

        // Dossier-template filter: OR over the supplied non-empty ids.
        if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {

            var dossierTemplateIdQueryBuilder = QueryBuilders.bool();

            for (var dossierTemplateId : dossierTemplateIds) {
                if (StringUtils.isNotEmpty(dossierTemplateId)) {
                    dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierTemplateId").query(dossierTemplateId)));
                }
            }

            filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
        }

        // Dossier filter: OR over the supplied non-empty ids.
        if (dossierIds != null && !dossierIds.isEmpty()) {

            var dossierIdQueryBuilder = QueryBuilders.bool();

            for (var dossierId : dossierIds) {
                if (StringUtils.isNotEmpty(dossierId)) {
                    dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match(q -> q.field("dossierId").query(dossierId)));
                }
            }

            filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
        }

        if (StringUtils.isNotEmpty(fileId)) {
            filterQuery.must(QueryBuilders.match(q -> q.field("fileId").query(fileId)));
        }

        if (StringUtils.isNotEmpty(assignee)) {
            filterQuery.must(QueryBuilders.match(q -> q.field("assignee").query(assignee)));
        }

        // Archived flag: include both values or only "false".
        if (includeArchivedDossiers) {
            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived")
                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
        } else {
            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
        }

        // Deleted flag: include both values or only "false".
        if (includeDeletedDossiers) {
            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted")
                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))));
        } else {
            filterQuery.must(QueryBuilders.terms(q -> q.field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build())))));
        }

        if (StringUtils.isNotEmpty(workflowStatus)) {
            filterQuery.must(QueryBuilders.match(q -> q.field("workflowStatus").query(workflowStatus)));
        }

        // Attribute filters: each name/value pair must match (AND semantics).
        if (fileAttributes != null && !fileAttributes.isEmpty()) {
            var fileAttributesQueryBuilder = QueryBuilders.bool();

            for (var fileAttributeKey : fileAttributes.keySet()) {
                if (StringUtils.isNotEmpty(fileAttributeKey)) {
                    fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
                            .must(QueryBuilders.match(q -> q.field("fileAttributes.name").query(fileAttributeKey)))
                            .must(QueryBuilders.match(q -> q.field("fileAttributes.value").query(fileAttributes.get(fileAttributeKey))))
                            .build()
                            ._toQuery()));
                }
            }

            filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
        }

        return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
    }


    /**
     * Maps the raw response to a {@link SearchResult}.
     *
     * <p>NOTE(review): uses raw {@code SearchResponse}/{@code Hit} types, and
     * {@code maxScore()}/{@code total()} look like they could be null for an
     * empty result set — confirm against the client version before relying on
     * zero-hit searches.
     */
    private SearchResult convert(SearchResponse response, Query query) {

        List<Hit> hits = response.hits().hits();

        return SearchResult.builder()
                .matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
                .maxScore(response.hits().maxScore().floatValue())
                .total(response.hits().total().value())
                .build();
    }


    /**
     * Converts one hit into a {@link MatchedDocument}: recovers matched terms
     * from the named queries (stripping the "filename." / "fileAttributes."
     * prefixes), derives unmatched terms from the original query, and attaches
     * section inner hits when present.
     */
    private MatchedDocument convertSearchHit(Hit hit, Query query) {

        List<String> m = hit.matchedQueries();

        // Query names were set to the bare term, or prefixed for filename/attribute matches.
        Set<String> matchesTerms = m.stream()
                .map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
                .map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
                .collect(Collectors.toSet());

        // Any must/should term that did not appear in matchedQueries is "unmatched".
        Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());

        IndexDocument indexDocument = (IndexDocument) hit.source();

        MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
                .score(hit.score().floatValue())
                .dossierId(indexDocument.getDossierId())
                .dossierTemplateId(indexDocument.getDossierTemplateId())
                .fileId(indexDocument.getFileId())
                .assignee(indexDocument.getAssignee())
                .fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
                .workflowStatus(indexDocument.getWorkflowStatus())
                .fileName(indexDocument.getFilename())
                .dossierDeleted(indexDocument.isDossierDeleted())
                .dossierArchived(indexDocument.isDossierArchived())
                .highlights(hit.highlight())
                .matchedTerms(matchesTerms)
                .unmatchedTerms(unmatchedTerms);

        if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
            InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
            // containsAllMatchedSections is false when the 100-inner-hit cap truncated the list.
            matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
                    .containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
        }

        return matchedDocumentBuilder.build();

    }


    /**
     * Flattens the indexed attribute list (objects with "name"/"value") into a
     * name-to-value map. A null source yields an empty map.
     */
    private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {

        Map<String, String> fileAttributes = new HashMap<>();

        if (fileAttributesSourceMap != null) {
            List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
            list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
        }

        return fileAttributes;
    }


    /**
     * Converts a section inner hit (raw JSON source) into a {@link MatchedSection};
     * "headline" is optional, "sectionNumber" and "pages" are read unconditionally.
     */
    private MatchedSection convertInnerHit(Hit<JsonData> hit) {

        JsonObject indexSection = hit.source().toJson().asJsonObject();

        var jsonArray = indexSection.getJsonArray("pages");
        var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());

        return MatchedSection.builder()
                .headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
                .sectionNumber(indexSection.getString("sectionNumber"))
                .pages(pages)
                .matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
                .build();
    }


    // Non-positive page sizes fall back to a default of 10.
    private int getPageSizeOrDefault(int pageSize) {

        if (pageSize <= 0) {
            return 10;
        }
        return pageSize;
    }


    // Negative page numbers fall back to the first page.
    private int getPageOrDefault(int page) {

        if (page < 0) {
            return 0;
        }
        return page;
    }

}
|
||||
@ -0,0 +1,44 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.opensearch.client.opensearch._types.OpenSearchException;
|
||||
import org.opensearch.client.opensearch._types.Refresh;
|
||||
import org.opensearch.client.opensearch.core.DeleteRequest;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
|
||||
public class DocumentDeleteServiceImpl implements DocumentDeleteService {
|
||||
|
||||
private final OpensearchClientCache clientCache;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
public void deleteDocument(String fileId) {
|
||||
|
||||
DeleteRequest request = new DeleteRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(fileId)
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
|
||||
.build();
|
||||
|
||||
try {
|
||||
clientCache.getClient().delete(request);
|
||||
} catch (IOException | OpenSearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentDeleteError(fileId, e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@ -0,0 +1,44 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.opensearch.client.opensearch._types.OpenSearchException;
|
||||
import org.opensearch.client.opensearch._types.Refresh;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentIndexService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
|
||||
public class DocumentIndexServiceImpl implements DocumentIndexService {
|
||||
|
||||
private final OpensearchClientCache clientCache;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
@Timed("redactmanager_indexDocument")
|
||||
public void indexDocument(IndexDocument indexDocument) {
|
||||
|
||||
try {
|
||||
clientCache.getClient().index(i -> i.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(indexDocument.getFileId())
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy()))
|
||||
.document(indexDocument));
|
||||
} catch (IOException | OpenSearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentIndexError(indexDocument.getFileId(), e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,53 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.opensearch.client.opensearch._types.OpenSearchException;
|
||||
import org.opensearch.client.opensearch._types.Refresh;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocumentUpdate;
|
||||
import com.iqser.red.service.search.v1.server.service.DocumentUpdateService;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
|
||||
public class DocumentUpdateServiceImpl implements DocumentUpdateService {
|
||||
|
||||
private final OpensearchClientCache clientCache;
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@Timed("redactmanager_updateDocument")
|
||||
public void updateDocument(String fileId, IndexDocumentUpdate indexDocumentUpdate) {
|
||||
|
||||
try {
|
||||
clientCache.getClient()
|
||||
.update(u -> u.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
|
||||
.id(fileId)
|
||||
.doc(indexDocumentUpdate)
|
||||
.refresh(Refresh._DESERIALIZER.parse(settings.getRefreshPolicy())), IndexDocumentUpdate.class);
|
||||
} catch (IOException | OpenSearchException e) {
|
||||
clientCache.isClientAliveOrTerminate();
|
||||
throw IndexException.documentUpdateError(fileId, e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@Timed("redactmanager_documentExists")
|
||||
public boolean documentExists(String fileId) {
|
||||
|
||||
return clientCache.getClient().exists(e -> e.index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix())).id(fileId)).value();
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,101 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URL;
|
||||
|
||||
import org.opensearch.client.json.JsonpMapper;
|
||||
import org.opensearch.client.opensearch._types.mapping.TypeMapping;
|
||||
import org.opensearch.client.opensearch.indices.IndexSettings;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.core.io.ResourceLoader;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
|
||||
import jakarta.json.stream.JsonParser;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
|
||||
@SuppressWarnings("PMD")
|
||||
public class IndexCreatorServiceImpl {
|
||||
|
||||
private final ElasticsearchSettings settings;
|
||||
|
||||
|
||||
public void createIndex(OpensearchClient client) {
|
||||
|
||||
if (!indexExists(client)) {
|
||||
|
||||
try {
|
||||
var response = client.indices()
|
||||
.create(i -> i.index(IndexNameHelper.getSearchIndex(client.getSearchConnection().getIndexPrefix()))
|
||||
.settings(createIndexSettings(client))
|
||||
.mappings(createIndexMapping(client)));
|
||||
log.info("Successfully created index: {}", response.index());
|
||||
} catch (IOException e) {
|
||||
log.error("Failed to create index.", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private boolean indexExists(OpensearchClient client) {
|
||||
|
||||
try {
|
||||
var response = client.indices().exists(i -> i.index(IndexNameHelper.getSearchIndex(client.getSearchConnection().getIndexPrefix())));
|
||||
return response.value();
|
||||
} catch (IOException e) {
|
||||
throw IndexException.indexExists(e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private TypeMapping createIndexMapping(OpensearchClient client) {
|
||||
|
||||
URL resource = ResourceLoader.class.getClassLoader().getResource("index/mapping.json");
|
||||
|
||||
try (InputStream is = resource.openStream()) {
|
||||
|
||||
JsonpMapper mapper = client._transport().jsonpMapper();
|
||||
JsonParser parser = mapper.jsonProvider().createParser(is);
|
||||
|
||||
return TypeMapping._DESERIALIZER.deserialize(parser, mapper);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private IndexSettings createIndexSettings(OpensearchClient client) {
|
||||
|
||||
URL resource = ResourceLoader.class.getClassLoader().getResource("index/settings.json");
|
||||
|
||||
try (InputStream is = resource.openStream()) {
|
||||
|
||||
JsonpMapper mapper = client._transport().jsonpMapper();
|
||||
JsonParser parser = mapper.jsonProvider().createParser(is);
|
||||
|
||||
var indexSettingsFromJson = IndexSettings._DESERIALIZER.deserialize(parser, mapper);
|
||||
|
||||
// It is not possible to set "index.mapping.nested_objects.limit", OpenSearch seems to not have this param.
|
||||
// Hopefully they don't hava a limit for this, I was not able to find anything.
|
||||
// As elasticsearch has a limit for this, and we can't set it, it seems this is the only reason for now to have both clients.
|
||||
var indexSettings = new IndexSettings.Builder().index(indexSettingsFromJson.index())
|
||||
.numberOfReplicas(client.getSearchConnection().getNumberOfReplicas())
|
||||
.numberOfShards(client.getSearchConnection().getNumberOfShards())
|
||||
.analysis(indexSettingsFromJson.analysis())
|
||||
.build();
|
||||
|
||||
return indexSettings;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,92 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexDeleteService;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
|
||||
public class IndexDeleteServiceImpl implements IndexDeleteService {
|
||||
|
||||
private final OpensearchClientCache clientCache;
|
||||
private final IndexCreatorServiceImpl indexCreatorService;
|
||||
|
||||
|
||||
public void recreateIndex() {
|
||||
|
||||
closeIndex();
|
||||
dropIndex();
|
||||
indexCreatorService.createIndex(clientCache.getClient());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void recreateIndex(SearchConnection searchConnection) {
|
||||
|
||||
var client = new OpensearchClient(searchConnection);
|
||||
closeIndex(client, searchConnection.getIndexPrefix());
|
||||
dropIndex(client, searchConnection.getIndexPrefix());
|
||||
indexCreatorService.createIndex(client);
|
||||
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void closeIndex() {
|
||||
|
||||
closeIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void dropIndex() {
|
||||
|
||||
dropIndex(clientCache.getClient(), clientCache.getClient().getSearchConnection().getIndexPrefix());
|
||||
}
|
||||
|
||||
|
||||
public void dropIndex(SearchConnection searchConnection) {
|
||||
|
||||
var client = new OpensearchClient(searchConnection);
|
||||
closeIndex(client, searchConnection.getIndexPrefix());
|
||||
dropIndex(client, searchConnection.getIndexPrefix());
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private void closeIndex(OpensearchClient opensearchClient, String indexPrefix) {
|
||||
|
||||
var closeIndexResponse = opensearchClient.indices()
|
||||
.close(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
|
||||
if (closeIndexResponse.acknowledged()) {
|
||||
log.info("Index is closed");
|
||||
} else {
|
||||
throw new IndexException("Error while closing index");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
private void dropIndex(OpensearchClient opensearchClient, String indexPrefix) {
|
||||
|
||||
log.info("Will drop index");
|
||||
var deleteIndexResponse = opensearchClient.indices().delete(i -> i.index(IndexNameHelper.getSearchIndex(indexPrefix)).timeout(t -> t.time("2m")));
|
||||
|
||||
if (deleteIndexResponse.acknowledged()) {
|
||||
log.info("Index is dropped");
|
||||
} else {
|
||||
throw new IndexException("Error while dropping index");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,56 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import org.opensearch.client.opensearch._types.OpenSearchException;
|
||||
import org.opensearch.client.opensearch.indices.GetIndicesSettingsResponse;
|
||||
import org.opensearch.client.opensearch.indices.IndexState;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryResult;
|
||||
import com.iqser.red.service.search.v1.server.service.IndexQueryService;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
|
||||
@SuppressWarnings("PMD")
|
||||
public class IndexQueryServiceImpl implements IndexQueryService {
|
||||
|
||||
@SneakyThrows
|
||||
public IndexQueryResult getIndexQueryResult(SearchConnection searchConnection) {
|
||||
|
||||
IndexQueryResult.IndexQueryResultBuilder builder = IndexQueryResult.builder();
|
||||
|
||||
Optional<IndexState> optionalIndexState = getIndexState(searchConnection);
|
||||
if (optionalIndexState.isPresent()) {
|
||||
builder.indexFound(true);
|
||||
var indexSettings = optionalIndexState.get().settings();
|
||||
if (indexSettings != null) {
|
||||
builder.numberOfReplicas(indexSettings.numberOfReplicas()).numberOfShards(indexSettings.numberOfShards());
|
||||
}
|
||||
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
private Optional<IndexState> getIndexState(SearchConnection searchConnection) {
|
||||
|
||||
var opensearchClient = new OpensearchClient(searchConnection);
|
||||
var indexName = IndexNameHelper.getSearchIndex(opensearchClient.getSearchConnection().getIndexPrefix());
|
||||
try {
|
||||
GetIndicesSettingsResponse settings = opensearchClient.indices().getSettings(i -> i.index(indexName));
|
||||
return Optional.ofNullable(settings.get(indexName));
|
||||
} catch (OpenSearchException openSearchException) {
|
||||
return Optional.empty();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,62 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
import lombok.Data;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.experimental.Delegate;
|
||||
import org.apache.http.HttpHost;
|
||||
import org.apache.http.auth.AuthScope;
|
||||
import org.apache.http.auth.UsernamePasswordCredentials;
|
||||
import org.apache.http.client.CredentialsProvider;
|
||||
import org.apache.http.impl.client.BasicCredentialsProvider;
|
||||
import org.opensearch.client.RestClient;
|
||||
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
|
||||
import org.opensearch.client.opensearch.OpenSearchClient;
|
||||
import org.opensearch.client.transport.rest_client.RestClientTransport;
|
||||
|
||||
@Data
|
||||
@SuppressWarnings("PMD")
|
||||
public class OpensearchClient {
|
||||
|
||||
// Lower timeouts should be set per request.
|
||||
private static final int ABSURD_HIGH_TIMEOUT = 600_000;
|
||||
|
||||
private SearchConnection searchConnection;
|
||||
|
||||
@Delegate
|
||||
private OpenSearchClient client;
|
||||
|
||||
|
||||
public OpensearchClient(SearchConnection searchConnection) {
|
||||
|
||||
HttpHost[] httpHost = searchConnection.getHosts()
|
||||
.stream()
|
||||
.map(host -> new HttpHost(host, searchConnection.getPort(), searchConnection.getScheme()))
|
||||
.toList()
|
||||
.toArray(new HttpHost[searchConnection.getHosts().size()]);
|
||||
|
||||
var builder = RestClient.builder(httpHost)
|
||||
.setRequestConfigCallback(requestConfigBuilder -> requestConfigBuilder.setConnectTimeout(0)
|
||||
.setConnectionRequestTimeout(ABSURD_HIGH_TIMEOUT)
|
||||
.setSocketTimeout(ABSURD_HIGH_TIMEOUT));
|
||||
|
||||
if (searchConnection.getUsername() != null && !searchConnection.getUsername().isEmpty()) {
|
||||
final CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
|
||||
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(searchConnection.getUsername(), searchConnection.getPassword()));
|
||||
builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider));
|
||||
}
|
||||
|
||||
var transport = new RestClientTransport(builder.build(), new JacksonJsonpMapper());
|
||||
|
||||
this.searchConnection = searchConnection;
|
||||
this.client = new OpenSearchClient(transport);
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void terminate() {
|
||||
|
||||
client._transport().close();
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,101 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.google.common.cache.CacheBuilder;
|
||||
import com.google.common.cache.CacheLoader;
|
||||
import com.google.common.cache.LoadingCache;
|
||||
import com.google.common.cache.RemovalListener;
|
||||
import com.knecon.fforesight.tenantcommons.EncryptionDecryptionService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.TenantsClient;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.SneakyThrows;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Service
|
||||
@RequiredArgsConstructor
|
||||
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
|
||||
public class OpensearchClientCache {
|
||||
|
||||
private final TenantsClient tenantsClient;
|
||||
private final EncryptionDecryptionService encryptionDecryptionService;
|
||||
private final IndexCreatorServiceImpl indexCreatorService;
|
||||
|
||||
@Value("${multitenancy.client-cache.maximumSize:100}")
|
||||
private Long maximumSize;
|
||||
|
||||
@Value("${multitenancy.client-cache.expireAfterAccess:10}")
|
||||
private Integer expireAfterAccess;
|
||||
|
||||
private LoadingCache<String, OpensearchClient> clients;
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public void isClientAliveOrTerminate() {
|
||||
|
||||
try {
|
||||
var client = clients.get(TenantContext.getTenantId());
|
||||
try {
|
||||
|
||||
log.info("Checking if client is still alive: {}", client.info());
|
||||
} catch (Exception e) {
|
||||
|
||||
try {
|
||||
client.terminate();
|
||||
} catch (Exception e2) {
|
||||
|
||||
log.info("Failed to terminate ES Client");
|
||||
clients.invalidate(TenantContext.getTenantId());
|
||||
}
|
||||
}
|
||||
}catch (Exception e){
|
||||
log.error("Failed to terminate/invalide client", e);
|
||||
}
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
protected void createCache() {
|
||||
|
||||
clients = CacheBuilder.newBuilder()
|
||||
.maximumSize(maximumSize)
|
||||
.expireAfterAccess(expireAfterAccess, TimeUnit.MINUTES)
|
||||
.removalListener((RemovalListener<String, OpensearchClient>) removal -> {
|
||||
try {
|
||||
removal.getValue().terminate();
|
||||
log.info("Closed elasticsearch client for tenant {}", removal.getKey());
|
||||
} catch (Exception e) {
|
||||
log.info("Failed to close elasticsearch client for tenant {}", removal.getKey());
|
||||
}
|
||||
})
|
||||
.build(new CacheLoader<>() {
|
||||
public OpensearchClient load(String tenantId) {
|
||||
|
||||
var tenant = tenantsClient.getTenant(tenantId);
|
||||
|
||||
if (tenant.getSearchConnection().getPassword() != null) {
|
||||
tenant.getSearchConnection().setPassword(encryptionDecryptionService.decrypt(tenant.getSearchConnection().getPassword()));
|
||||
}
|
||||
var client = new OpensearchClient(tenant.getSearchConnection());
|
||||
log.info("Initialized elasticsearch client for tenant {}", tenantId);
|
||||
indexCreatorService.createIndex(client);
|
||||
return client;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
public OpensearchClient getClient() {
|
||||
|
||||
return clients.get(TenantContext.getTenantId());
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,357 @@
|
||||
package com.iqser.red.service.search.v1.server.service.opensearch;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.IntStream;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
import org.opensearch.client.json.JsonData;
|
||||
import org.opensearch.client.opensearch._types.FieldValue;
|
||||
import org.opensearch.client.opensearch._types.query_dsl.ChildScoreMode;
|
||||
import org.opensearch.client.opensearch._types.query_dsl.QueryBuilders;
|
||||
import org.opensearch.client.opensearch.core.SearchRequest;
|
||||
import org.opensearch.client.opensearch.core.SearchResponse;
|
||||
import org.opensearch.client.opensearch.core.search.BuiltinHighlighterType;
|
||||
import org.opensearch.client.opensearch.core.search.HighlightField;
|
||||
import org.opensearch.client.opensearch.core.search.HighlighterType;
|
||||
import org.opensearch.client.opensearch.core.search.Hit;
|
||||
import org.opensearch.client.opensearch.core.search.InnerHitsResult;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.model.MatchedDocument;
|
||||
import com.iqser.red.service.search.v1.model.MatchedSection;
|
||||
import com.iqser.red.service.search.v1.model.SearchResult;
|
||||
import com.iqser.red.service.search.v1.server.exception.IndexException;
|
||||
import com.iqser.red.service.search.v1.server.model.IndexDocument;
|
||||
import com.iqser.red.service.search.v1.server.model.Query;
|
||||
import com.iqser.red.service.search.v1.server.service.SearchService;
|
||||
import com.iqser.red.service.search.v1.server.utils.IndexNameHelper;
|
||||
import com.iqser.red.service.search.v1.server.utils.QueryStringConverter;
|
||||
|
||||
import io.micrometer.core.annotation.Timed;
|
||||
import io.micrometer.core.instrument.util.StringUtils;
|
||||
import jakarta.json.JsonObject;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
@Service
@RequiredArgsConstructor
@ConditionalOnProperty(prefix = "search", name = "backend", havingValue = "opensearch")
public class SearchServiceImpl implements SearchService {

    private final OpensearchClientCache clientCache;

    /**
     * Runs a full-text search over the tenant's search index and converts the
     * response into the service's {@link SearchResult} model.
     *
     * <p>The query string is parsed into must-terms (quoted phrases) and
     * should-terms via {@link QueryStringConverter}; highlighting is requested on
     * section text, filename and file-attribute values.
     *
     * @param queryString             raw user query (quotes mark required phrases)
     * @param dossierTemplateIds      optional filter on dossier template ids
     * @param dossierIds              optional filter on dossier ids
     * @param fileId                  optional filter on a single file id
     * @param assignee                optional filter on assignee
     * @param includeDeletedDossiers  when false, deleted dossiers are filtered out
     * @param includeArchivedDossiers when false, archived dossiers are filtered out
     * @param workflowStatus          optional filter on workflow status
     * @param fileAttributes          optional name/value filters on file attributes
     * @param page                    zero-based page index (negative → 0)
     * @param pageSize                page size (non-positive → 10)
     * @param returnSections          when true, matched sections are returned as inner hits
     */
    @Timed("redactmanager_search")
    public SearchResult search(String queryString,
            List<String> dossierTemplateIds,
            List<String> dossierIds,
            String fileId,
            String assignee,
            boolean includeDeletedDossiers,
            boolean includeArchivedDossiers,
            String workflowStatus,
            Map<String, String> fileAttributes,
            int page,
            int pageSize,
            boolean returnSections) {

        Query query = QueryStringConverter.convert(queryString);

        // Fields to highlight; default highlighter settings per field.
        Map<String, HighlightField> highlightFieldMap = new HashMap<>();
        highlightFieldMap.put("sections.text", new HighlightField.Builder().build());
        highlightFieldMap.put("filename", new HighlightField.Builder().build());
        highlightFieldMap.put("fileAttributes.value", new HighlightField.Builder().build());

        // Source filtering keeps the hit payload to the metadata fields we map below.
        SearchRequest request = new SearchRequest.Builder().index(IndexNameHelper.getSearchIndex(clientCache.getClient().getSearchConnection().getIndexPrefix()))
                .query(convertQuery(query,
                        dossierTemplateIds,
                        dossierIds,
                        fileId,
                        assignee,
                        includeDeletedDossiers,
                        includeArchivedDossiers,
                        workflowStatus,
                        fileAttributes,
                        returnSections))
                .from(getPageOrDefault(page) * getPageSizeOrDefault(pageSize))
                .size(getPageSizeOrDefault(pageSize))
                .source(s -> s.filter(f -> f.includes("dossierId",
                        "dossierTemplateId",
                        "dossierDeleted",
                        "dossierArchived",
                        "filename",
                        "fileId",
                        "assignee",
                        "dossierStatus",
                        "workflowStatus",
                        "fileAttributes")))
                .highlight(h -> h.type(HighlighterType.of(b -> b.builtin(BuiltinHighlighterType.FastVector))).fields(highlightFieldMap))
                .trackScores(true)
                .build();

        SearchResponse response = execute(request);

        return convert(response, query);
    }

    /**
     * Executes the request with the cached tenant client. On I/O failure the
     * client's liveness is probed (possibly evicting a dead client) and the
     * failure is rethrown as an {@link IndexException}.
     */
    protected SearchResponse<IndexDocument> execute(SearchRequest searchRequest) {

        try {
            return clientCache.getClient().search(searchRequest, IndexDocument.class);
        } catch (IOException e) {
            clientCache.isClientAliveOrTerminate();
            throw IndexException.searchFailed(e);
        }
    }

    /**
     * Builds the OpenSearch query: a bool query whose {@code must} part encodes
     * required phrases (matched against section text, filename or file-attribute
     * value), whose {@code should} part encodes optional terms, and whose
     * {@code filter} part encodes all metadata filters. Query names are used
     * later to recover which terms matched.
     */
    private org.opensearch.client.opensearch._types.query_dsl.Query convertQuery(Query query,
            List<String> dossierTemplateIds,
            List<String> dossierIds,
            String fileId,
            String assignee,
            boolean includeDeletedDossiers,
            boolean includeArchivedDossiers,
            String workflowStatus,
            Map<String, String> fileAttributes,
            boolean returnSections) {

        var entireQuery = QueryBuilders.bool();
        // Collects only the section-text sub-queries; reused for the nested inner-hits query.
        var sectionsQueries = QueryBuilders.bool();

        for (String must : query.getMusts()) {

            // queryName carries the original term so matched terms can be recovered from hits.
            var textPhraseQuery = QueryBuilders.matchPhrase().field("sections.text").query(must.toLowerCase(Locale.ROOT)).queryName(must).build()._toQuery();
            var filenamePhraseQuery = QueryBuilders.matchPhrasePrefix().field("filename").query(must.toLowerCase(Locale.ROOT)).queryName("filename." + must).build()._toQuery();
            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase()
                    .field("fileAttributes.value")
                    .query(must.toLowerCase(Locale.ROOT))
                    .queryName("fileAttributes." + must)
                    .build()
                    ._toQuery();

            // A required term may match in any one of the three fields.
            var filenameOrTextMustQuery = QueryBuilders.bool().should(textPhraseQuery).should(filenamePhraseQuery).should(fileAttributesPhraseQuery).build()._toQuery();
            entireQuery.must(filenameOrTextMustQuery);
            sectionsQueries.should(textPhraseQuery);
        }
        for (String should : query.getShoulds()) {

            var textTermQuery = QueryBuilders.matchPhrase().field("sections.text").query(should.toLowerCase(Locale.ROOT)).queryName(should).build()._toQuery();
            var filenameTermQuery = QueryBuilders.matchPhrasePrefix().field("filename").query(should.toLowerCase(Locale.ROOT)).queryName("filename." + should).build()._toQuery();
            var fileAttributesPhraseQuery = QueryBuilders.matchPhrase()
                    .field("fileAttributes.value")
                    .query(should.toLowerCase(Locale.ROOT))
                    .queryName("fileAttributes." + should)
                    .build()
                    ._toQuery();
            entireQuery.should(textTermQuery);
            entireQuery.should(filenameTermQuery);
            entireQuery.should(fileAttributesPhraseQuery);
            sectionsQueries.should(textTermQuery);
        }

        if (returnSections) {
            // Nested query over "sections" returns up to 100 matching sections as inner hits.
            var nestedQuery = QueryBuilders.nested()
                    .scoreMode(ChildScoreMode.Avg)
                    .queryName("sections")
                    .query(sectionsQueries.build()._toQuery())
                    .path("sections")
                    .innerHits(i -> i.size(100))
                    .build()
                    ._toQuery();
            entireQuery.should(nestedQuery);
        }

        var filterQuery = QueryBuilders.bool();

        if (dossierTemplateIds != null && !dossierTemplateIds.isEmpty()) {

            // Any of the given template ids (bool-should), skipping blank entries.
            var dossierTemplateIdQueryBuilder = QueryBuilders.bool();

            for (var dossierTemplateId : dossierTemplateIds) {
                if (StringUtils.isNotEmpty(dossierTemplateId)) {
                    dossierTemplateIdQueryBuilder = dossierTemplateIdQueryBuilder.should(QueryBuilders.match()
                            .field("dossierTemplateId")
                            .query(q -> q.stringValue(dossierTemplateId))
                            .build()
                            ._toQuery());
                }
            }

            filterQuery.must(dossierTemplateIdQueryBuilder.build()._toQuery());
        }

        if (dossierIds != null && !dossierIds.isEmpty()) {

            // Any of the given dossier ids (bool-should), skipping blank entries.
            var dossierIdQueryBuilder = QueryBuilders.bool();

            for (var dossierId : dossierIds) {
                if (StringUtils.isNotEmpty(dossierId)) {
                    dossierIdQueryBuilder = dossierIdQueryBuilder.should(QueryBuilders.match().field("dossierId").query(q -> q.stringValue(dossierId)).build()._toQuery());
                }
            }

            filterQuery.must(dossierIdQueryBuilder.build()._toQuery());
        }

        if (StringUtils.isNotEmpty(fileId)) {
            filterQuery.must(QueryBuilders.match().field("fileId").query(q -> q.stringValue(fileId)).build()._toQuery());
        }

        if (StringUtils.isNotEmpty(assignee)) {
            filterQuery.must(QueryBuilders.match().field("assignee").query(q -> q.stringValue(assignee)).build()._toQuery());
        }

        // Archived flag: include both values when archived dossiers are requested,
        // otherwise restrict to non-archived.
        if (includeArchivedDossiers) {
            filterQuery.must(QueryBuilders.terms()
                    .field("dossierArchived")
                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))
                    .build()
                    ._toQuery());
        } else {
            filterQuery.must(QueryBuilders.terms().field("dossierArchived").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build()))).build()._toQuery());
        }

        // Deleted flag: same pattern as the archived flag above.
        if (includeDeletedDossiers) {
            filterQuery.must(QueryBuilders.terms()
                    .field("dossierDeleted")
                    .terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(true).build(), new FieldValue.Builder().booleanValue(false).build())))
                    .build()
                    ._toQuery());
        } else {
            filterQuery.must(QueryBuilders.terms().field("dossierDeleted").terms(t -> t.value(List.of(new FieldValue.Builder().booleanValue(false).build()))).build()._toQuery());
        }

        if (StringUtils.isNotEmpty(workflowStatus)) {
            filterQuery.must(QueryBuilders.match().field("workflowStatus").query(q -> q.stringValue(workflowStatus)).build()._toQuery());
        }

        if (fileAttributes != null && !fileAttributes.isEmpty()) {
            var fileAttributesQueryBuilder = QueryBuilders.bool();

            // Each attribute must match on both name and value.
            for (var fileAttributeKey : fileAttributes.keySet()) {
                if (StringUtils.isNotEmpty(fileAttributeKey)) {
                    fileAttributesQueryBuilder.filter(List.of(QueryBuilders.bool()
                            .must(QueryBuilders.match().field("fileAttributes.name").query(q -> q.stringValue(fileAttributeKey)).build()._toQuery())
                            .must(QueryBuilders.match().field("fileAttributes.value").query(q -> q.stringValue(fileAttributes.get(fileAttributeKey))).build()._toQuery())
                            .build()
                            ._toQuery()));
                }
            }

            filterQuery.must(fileAttributesQueryBuilder.build()._toQuery());
        }

        return QueryBuilders.bool().filter(filterQuery.build()._toQuery()).must(entireQuery.build()._toQuery()).build()._toQuery();
    }

    /**
     * Maps the raw response into a {@link SearchResult}, converting each hit and
     * defaulting the max score to 0 when absent.
     */
    private SearchResult convert(SearchResponse response, Query query) {

        List<Hit> hits = response.hits().hits();

        return SearchResult.builder()
                .matchedDocuments(hits.stream().map(hit -> convertSearchHit((Hit) hit, query)).collect(Collectors.toList()))
                .maxScore(response.maxScore() == null ? 0 : response.maxScore().floatValue())
                .total(response.hits().total().value())
                .build();
    }

    /**
     * Converts a single hit into a {@link MatchedDocument}: recovers matched terms
     * from query names (stripping the "filename."/"fileAttributes." prefixes),
     * derives unmatched terms from the original query, copies document metadata
     * and attaches matched sections when inner hits are present.
     */
    private MatchedDocument convertSearchHit(Hit hit, Query query) {

        List<String> m = hit.matchedQueries();

        // Query names were set to the raw term, optionally prefixed by the field they hit.
        Set<String> matchesTerms = m.stream()
                .map(match -> match.contains("filename.") ? match.replace("filename.", "") : match)
                .map(match -> match.contains("fileAttributes.") ? match.replace("fileAttributes.", "") : match)
                .collect(Collectors.toSet());

        Set<String> unmatchedTerms = Stream.concat(query.getMusts().stream(), query.getShoulds().stream()).filter(term -> !matchesTerms.contains(term)).collect(Collectors.toSet());

        IndexDocument indexDocument = (IndexDocument) hit.source();

        MatchedDocument.MatchedDocumentBuilder matchedDocumentBuilder = MatchedDocument.builder()
                .score(hit.score().floatValue())
                .dossierId(indexDocument.getDossierId())
                .dossierTemplateId(indexDocument.getDossierTemplateId())
                .fileId(indexDocument.getFileId())
                .assignee(indexDocument.getAssignee())
                .fileAttributes(convertFileAttributes(indexDocument.getFileAttributes()))
                .workflowStatus(indexDocument.getWorkflowStatus())
                .fileName(indexDocument.getFilename())
                .dossierDeleted(indexDocument.isDossierDeleted())
                .dossierArchived(indexDocument.isDossierArchived())
                .highlights(hit.highlight())
                .matchedTerms(matchesTerms)
                .unmatchedTerms(unmatchedTerms);

        if (hit.innerHits() != null && !hit.innerHits().isEmpty()) {
            InnerHitsResult sectionHits = (InnerHitsResult) hit.innerHits().get("sections");
            // containsAllMatchedSections is false when more sections matched than were returned
            // (the nested query caps inner hits at 100).
            matchedDocumentBuilder.matchedSections(sectionHits.hits().hits().stream().map(innerHit -> convertInnerHit(innerHit)).collect(Collectors.toList()))
                    .containsAllMatchedSections(sectionHits.hits().total().value() == sectionHits.hits().hits().size());
        }

        return matchedDocumentBuilder.build();

    }

    /**
     * Flattens the indexed file-attribute entries (a list of {name, value} maps)
     * into a simple name→value map; returns an empty map for null input.
     */
    private Map<String, String> convertFileAttributes(Object fileAttributesSourceMap) {

        Map<String, String> fileAttributes = new HashMap<>();

        if (fileAttributesSourceMap != null) {
            List<HashMap<String, String>> list = new ObjectMapper().convertValue(fileAttributesSourceMap, ArrayList.class);
            list.forEach(r -> fileAttributes.put(r.get("name"), r.get("value")));
        }

        return fileAttributes;
    }

    /**
     * Converts a nested section inner hit (raw JSON) into a {@link MatchedSection},
     * reading headline (optional), section number and page numbers.
     */
    private MatchedSection convertInnerHit(Hit<JsonData> hit) {

        JsonObject indexSection = hit.source().toJson().asJsonObject();

        var jsonArray = indexSection.getJsonArray("pages");
        var pages = IntStream.range(0, jsonArray.size()).mapToObj(i -> jsonArray.getInt(i)).collect(Collectors.toSet());

        return MatchedSection.builder()
                .headline(indexSection.get("headline") != null ? indexSection.getString("headline") : null)
                .sectionNumber(indexSection.getString("sectionNumber"))
                .pages(pages)
                .matchedTerms(hit.matchedQueries().stream().collect(Collectors.toSet()))
                .build();
    }

    /**
     * Returns the page size, defaulting non-positive values to 10.
     */
    private int getPageSizeOrDefault(int pageSize) {

        if (pageSize <= 0) {
            return 10;
        }
        return pageSize;
    }

    /**
     * Returns the page index, defaulting negative values to 0.
     */
    private int getPageOrDefault(int page) {

        if (page < 0) {
            return 0;
        }
        return page;
    }

}
|
||||
@ -1,9 +1,5 @@
|
||||
package com.iqser.red.service.search.v1.server.settings;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.elasticsearch.action.support.WriteRequest;
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
@ -16,22 +12,11 @@ import lombok.Data;
|
||||
@ConfigurationProperties("elasticsearch")
|
||||
public class ElasticsearchSettings {
|
||||
|
||||
private List<String> hosts = new ArrayList<>();
|
||||
|
||||
private int port = 9300;
|
||||
|
||||
private String scheme = "http";
|
||||
|
||||
private String username;
|
||||
|
||||
private String password;
|
||||
|
||||
private int numberOfShards = 5;
|
||||
private int numberOfReplicas = 1;
|
||||
private int numberOfNestedObjectLimit = 100000;
|
||||
|
||||
/**
|
||||
* ES refresh policy for write requests to use. Used in tests to wait for completion of write requests.
|
||||
*/
|
||||
private WriteRequest.RefreshPolicy refreshPolicy = WriteRequest.RefreshPolicy.NONE;
|
||||
private String refreshPolicy = "true";
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,18 @@
|
||||
package com.iqser.red.service.search.v1.server.settings;
|
||||
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
@Data
@Primary
@Configuration
@ConfigurationProperties("search-service")
public class SearchServiceSettings {

    // When true, the search index is dropped and recreated on startup.
    // NOTE(review): inferred from the property name — confirm against the startup code that reads it.
    private boolean dropAndRecreateIndex;

    // When true, only a migration is performed.
    // NOTE(review): inferred from the property name — confirm against the startup code that reads it.
    private boolean migrateOnly;

}
|
||||
@ -1,41 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.utils;
|
||||
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.springframework.boot.actuate.elasticsearch.ElasticsearchRestHealthIndicator;
|
||||
import org.springframework.boot.actuate.health.Health;
|
||||
import org.springframework.boot.actuate.health.Status;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
|
||||
@Slf4j
|
||||
@Component
|
||||
public class FailfastElasticSearchHealthIndicator extends ElasticsearchRestHealthIndicator {
|
||||
|
||||
public FailfastElasticSearchHealthIndicator(ElasticsearchClient elasticsearchClient) {
|
||||
|
||||
super(elasticsearchClient.getLowLevelClient());
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
protected void doHealthCheck(Health.Builder builder) throws Exception {
|
||||
|
||||
try {
|
||||
super.doHealthCheck(builder);
|
||||
Health healthStatus = builder.build();
|
||||
if (healthStatus.getStatus().equals(Status.DOWN)) {
|
||||
log.info("Elasticsearch health check returned {} , shutting down system...", healthStatus.getStatus());
|
||||
System.exit(1);
|
||||
} else {
|
||||
log.debug("Elasticsearch health status returned {} ", healthStatus.getStatus());
|
||||
}
|
||||
} catch (ElasticsearchException e) {
|
||||
log.info("Elasticsearch is not available, shutting down system...", e);
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,16 @@
|
||||
package com.iqser.red.service.search.v1.server.utils;
|
||||
|
||||
import lombok.experimental.UtilityClass;
|
||||
|
||||
@UtilityClass
|
||||
public class IndexNameHelper {
|
||||
|
||||
private static final String SEARCH_INDEX = "%s_search";
|
||||
|
||||
|
||||
public String getSearchIndex(String indexPrefix) {
|
||||
|
||||
return String.format(SEARCH_INDEX, indexPrefix);
|
||||
}
|
||||
|
||||
}
|
||||
@ -5,6 +5,7 @@ import java.util.List;
|
||||
|
||||
import com.iqser.red.service.search.v1.server.model.Query;
|
||||
|
||||
import io.micrometer.core.instrument.util.StringUtils;
|
||||
import lombok.experimental.UtilityClass;
|
||||
|
||||
@UtilityClass
|
||||
@ -16,23 +17,26 @@ public class QueryStringConverter {
|
||||
boolean inQuots = false;
|
||||
List<String> musts = new ArrayList<>();
|
||||
List<String> shoulds = new ArrayList<>();
|
||||
char[] trimmedQuery = queryString.trim().toCharArray();
|
||||
for (int i = 0; i < trimmedQuery.length; i++) {
|
||||
|
||||
if (trimmedQuery[i] == '"' && !inQuots) {
|
||||
inQuots = true;
|
||||
} else if (trimmedQuery[i] == '"' && inQuots) {
|
||||
musts.add(sb.toString().trim());
|
||||
sb = new StringBuilder();
|
||||
inQuots = false;
|
||||
} else if (trimmedQuery[i] == ' ' && !inQuots && !sb.toString().isEmpty()) {
|
||||
shoulds.add(sb.toString().trim());
|
||||
sb = new StringBuilder();
|
||||
} else if (i == trimmedQuery.length - 1) {
|
||||
sb.append(trimmedQuery[i]);
|
||||
shoulds.add(sb.toString().trim());
|
||||
} else {
|
||||
sb.append(trimmedQuery[i]);
|
||||
if (StringUtils.isNotEmpty(queryString)) {
|
||||
char[] trimmedQuery = queryString.trim().toCharArray();
|
||||
for (int i = 0; i < trimmedQuery.length; i++) {
|
||||
|
||||
if (trimmedQuery[i] == '"' && !inQuots) {
|
||||
inQuots = true;
|
||||
} else if (trimmedQuery[i] == '"' && inQuots) {
|
||||
musts.add(sb.toString().trim());
|
||||
sb = new StringBuilder();
|
||||
inQuots = false;
|
||||
} else if (trimmedQuery[i] == ' ' && !inQuots && !sb.toString().isEmpty()) {
|
||||
shoulds.add(sb.toString().trim());
|
||||
sb = new StringBuilder();
|
||||
} else if (i == trimmedQuery.length - 1) {
|
||||
sb.append(trimmedQuery[i]);
|
||||
shoulds.add(sb.toString().trim());
|
||||
} else {
|
||||
sb.append(trimmedQuery[i]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,30 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.utils;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URL;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import lombok.experimental.UtilityClass;
|
||||
|
||||
@UtilityClass
|
||||
public class ResourceLoader {
|
||||
|
||||
public String load(String classpathPath) {
|
||||
|
||||
URL resource = ResourceLoader.class.getClassLoader().getResource(classpathPath);
|
||||
if (resource == null) {
|
||||
throw new IllegalArgumentException("could not load classpath resource: " + classpathPath);
|
||||
}
|
||||
try (InputStream is = resource.openStream();
|
||||
InputStreamReader isr = new InputStreamReader(is, StandardCharsets.UTF_8);
|
||||
BufferedReader br = new BufferedReader(isr)) {
|
||||
return br.lines().collect(Collectors.joining("\n"));
|
||||
} catch (IOException e){
|
||||
throw new IllegalArgumentException("could not load classpath resource: " + classpathPath, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,5 @@
|
||||
server:
|
||||
port: 8099
|
||||
|
||||
persistence-service.url: "http://localhost:8085"
|
||||
tenant-user-management-service.url: "http://localhost:8091/tenant-user-management/internal"
|
||||
@ -1,12 +1,31 @@
|
||||
info:
|
||||
description: Search Service Server V1
|
||||
|
||||
file-management-service.url: "http://file-management-service-v1:8080"
|
||||
persistence-service.url: "http://persistence-service-v1:8080"
|
||||
tenant-user-management-service.url: "http://tenant-user-management-service:8080/internal"
|
||||
|
||||
fforesight:
|
||||
tenants.remote: true
|
||||
tenant-exchange.name: 'tenants-exchange'
|
||||
|
||||
logging.pattern.level: "%5p [${spring.application.name},%X{traceId:-},%X{spanId:-}]"
|
||||
|
||||
logging.type: ${LOGGING_TYPE:CONSOLE}
|
||||
kubernetes.namespace: ${NAMESPACE:default}
|
||||
project.version: 1.0-SNAPSHOT
|
||||
|
||||
|
||||
server:
|
||||
port: 8080
|
||||
|
||||
lifecycle:
|
||||
base-package: com.iqser.red.service.search
|
||||
|
||||
spring:
|
||||
application:
|
||||
name: search-service
|
||||
main:
|
||||
allow-circular-references: true # FIXME
|
||||
profiles:
|
||||
active: kubernetes
|
||||
rabbitmq:
|
||||
@ -31,17 +50,11 @@ management:
|
||||
health.enabled: true
|
||||
endpoints.web.exposure.include: prometheus, health
|
||||
metrics.export.prometheus.enabled: ${monitoring.enabled:false}
|
||||
tracing:
|
||||
enabled: ${TRACING_ENABLED:false}
|
||||
sampling:
|
||||
probability: ${TRACING_PROBABILITY:1.0}
|
||||
otlp:
|
||||
tracing:
|
||||
endpoint: ${OTLP_ENDPOINT:http://otel-collector-opentelemetry-collector.otel-collector:4318/v1/traces}
|
||||
|
||||
elasticsearch:
|
||||
hosts:
|
||||
- ${elasticsearch.cluster.hosts}
|
||||
port: ${elasticsearch.cluster.port:9200}
|
||||
scheme: ${elasticsearch.cluster.scheme:http}
|
||||
username: ${elasticsearch.cluster.username}
|
||||
password: ${elasticsearch.cluster.password}
|
||||
|
||||
storage:
|
||||
signer-type: 'AWSS3V4SignerType'
|
||||
bucket-name: 'redaction'
|
||||
region: 'us-east-1'
|
||||
endpoint: 'https://s3.amazonaws.com'
|
||||
|
||||
@ -11,11 +11,37 @@
|
||||
},
|
||||
"filename": {
|
||||
"type": "text",
|
||||
"term_vector": "with_positions_offsets"
|
||||
"term_vector": "with_positions_offsets",
|
||||
"analyzer": "filename_analyzer"
|
||||
},
|
||||
"indexTime": {
|
||||
"date": {
|
||||
"type": "date"
|
||||
},
|
||||
"assignee": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"workflowStatus": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"dossierArchived": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"dossierDeleted": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"fileAttributes": {
|
||||
"type": "nested",
|
||||
"include_in_parent": true,
|
||||
"properties": {
|
||||
"name": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"value": {
|
||||
"type": "text",
|
||||
"term_vector": "with_positions_offsets"
|
||||
}
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
"type": "nested",
|
||||
"include_in_parent": true,
|
||||
|
||||
@ -0,0 +1,24 @@
|
||||
{
|
||||
"analysis": {
|
||||
"tokenizer": {
|
||||
"filename_tokenizer": {
|
||||
"type": "char_group",
|
||||
"tokenize_on_chars": [
|
||||
"whitespace",
|
||||
"punctuation",
|
||||
"symbol"
|
||||
]
|
||||
}
|
||||
},
|
||||
"analyzer": {
|
||||
"filename_analyzer": {
|
||||
"type": "custom",
|
||||
"tokenizer": "filename_tokenizer",
|
||||
"filter": [
|
||||
"lowercase",
|
||||
"trim"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,17 @@
|
||||
<configuration>
|
||||
|
||||
<springProperty scope="configuration" name="logType" source="logging.type"/>
|
||||
<springProperty scope="context" name="application.name" source="spring.application.name"/>
|
||||
<springProperty scope="context" name="version" source="project.version"/>
|
||||
<include resource="org/springframework/boot/logging/logback/defaults.xml"/>
|
||||
<include resource="org/springframework/boot/logging/logback/console-appender.xml"/>
|
||||
|
||||
<appender name="JSON" class="ch.qos.logback.core.ConsoleAppender">
|
||||
<encoder class="net.logstash.logback.encoder.LogstashEncoder"/>
|
||||
</appender>
|
||||
|
||||
<root level="INFO">
|
||||
<appender-ref ref="${logType}"/>
|
||||
</root>
|
||||
|
||||
</configuration>
|
||||
@ -1,79 +1,114 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import java.time.Duration;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import org.assertj.core.util.Lists;
|
||||
import org.junit.ClassRule;
|
||||
import java.util.Set;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.springframework.amqp.rabbit.core.RabbitAdmin;
|
||||
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
|
||||
import org.springframework.boot.test.autoconfigure.actuate.observability.AutoConfigureObservability;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
import org.springframework.cloud.openfeign.EnableFeignClients;
|
||||
import org.springframework.context.ApplicationContextInitializer;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Import;
|
||||
import org.springframework.context.annotation.FilterType;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.testcontainers.elasticsearch.ElasticsearchContainer;
|
||||
import org.testcontainers.utility.DockerImageName;
|
||||
|
||||
import com.amazonaws.services.s3.AmazonS3;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.server.Application;
|
||||
import com.iqser.red.service.search.v1.server.client.ElasticsearchClient;
|
||||
import com.iqser.red.service.search.v1.server.settings.ElasticsearchSettings;
|
||||
import com.iqser.red.storage.commons.StorageAutoConfiguration;
|
||||
import com.iqser.red.storage.commons.service.StorageService;
|
||||
import com.iqser.red.storage.commons.utils.FileSystemBackedStorageService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.TenantsClient;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
|
||||
@ComponentScan
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractElasticsearchIntegrationTest.WAIT_FOR_WRITE_REQUESTS, AbstractElasticsearchIntegrationTest.SEARCH_BACKEND})
|
||||
@ContextConfiguration(initializers = {AbstractElasticsearchIntegrationTest.Initializer.class})
|
||||
@EnableFeignClients(basePackageClasses = AbstractElasticsearchIntegrationTest.TestConfiguration.class)
|
||||
@DirtiesContext
|
||||
public class AbstractElasticsearchIntegrationTest {
|
||||
@AutoConfigureObservability
|
||||
@SuppressWarnings("PMD")
|
||||
public abstract class AbstractElasticsearchIntegrationTest {
|
||||
|
||||
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=IMMEDIATE";
|
||||
|
||||
|
||||
@ClassRule
|
||||
public static ElasticsearchContainer elasticsearchContainer = new ElasticsearchContainer(DockerImageName.parse("nexus.iqser.com:5001/bitnami/elasticsearch:7.13.2-debian-10-r1").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch"));
|
||||
|
||||
@Autowired
|
||||
private IndexCreatorService indexCreationService;
|
||||
|
||||
@Autowired
|
||||
private StorageService storageService;
|
||||
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=wait_for";
|
||||
public static final String SEARCH_BACKEND = "search.backend=elasticsearch";
|
||||
|
||||
@MockBean
|
||||
private AmazonS3 amazonS3;
|
||||
private TenantsClient tenantsClient;
|
||||
|
||||
@Configuration
|
||||
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
|
||||
@Import(Application.class)
|
||||
@EnableFeignClients(basePackageClasses = TestConfiguration.class)
|
||||
public static class TestConfiguration {
|
||||
@MockBean
|
||||
private RabbitAdmin rabbitAdmin;
|
||||
|
||||
@Bean
|
||||
public ElasticsearchClient elasticsearchClient() {
|
||||
@MockBean
|
||||
private RabbitListenerEndpointRegistry rabbitListenerEndpointRegistry;
|
||||
|
||||
ElasticsearchSettings elasticsearchSettings = new ElasticsearchSettings();
|
||||
String host = elasticsearchContainer.getHttpHostAddress();
|
||||
elasticsearchSettings.setHosts(Lists.newArrayList(host.substring(0, host.indexOf(':'))));
|
||||
elasticsearchSettings.setPort(Integer.parseInt(host.substring(host.indexOf(':') + 1)));
|
||||
|
||||
return new ElasticsearchClient(elasticsearchSettings);
|
||||
}
|
||||
private static int port;
|
||||
|
||||
|
||||
@Bean
|
||||
public IndexCreatorService indexCreationService(ElasticsearchClient elasticsearchClient,
|
||||
ElasticsearchSettings elasticsearchSettings) {
|
||||
@BeforeEach
|
||||
public void setupOptimize() {
|
||||
|
||||
return new IndexCreatorService(elasticsearchClient, elasticsearchSettings);
|
||||
|
||||
}
|
||||
TenantContext.setTenantId("redaction");
|
||||
when(tenantsClient.getTenant("redaction")).thenReturn(TenantResponse.builder()
|
||||
.searchConnection(SearchConnection.builder()
|
||||
.hosts(Set.of("localhost"))
|
||||
.port(port)
|
||||
.scheme("http")
|
||||
.numberOfShards("1")
|
||||
.numberOfReplicas("5")
|
||||
.indexPrefix("indexprefix")
|
||||
.build())
|
||||
.build());
|
||||
}
|
||||
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public StorageService inmemoryStorage() {
|
||||
return new FileSystemBackedStorageService();
|
||||
static class Initializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {
|
||||
|
||||
public void initialize(ConfigurableApplicationContext configurableApplicationContext) {
|
||||
|
||||
var esContainer = new ElasticsearchContainer(DockerImageName.parse("elasticsearch:8.6.2").asCompatibleSubstituteFor("docker.elastic.co/elasticsearch/elasticsearch"));
|
||||
esContainer.getEnvMap().put("xpack.security.enabled", "false");
|
||||
esContainer.start();
|
||||
|
||||
var esHost = esContainer.getHttpHostAddress();
|
||||
port = Integer.parseInt(esHost.substring(esHost.lastIndexOf(':') + 1));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@Autowired
|
||||
protected StorageService storageService;
|
||||
|
||||
@Configuration
|
||||
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
|
||||
@ComponentScan(excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = StorageAutoConfiguration.class)})
|
||||
public static class TestConfiguration {
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public StorageService inmemoryStorage(ObjectMapper objectMapper) {
|
||||
|
||||
return new FileSystemBackedStorageService(objectMapper);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@ -0,0 +1,104 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import java.util.Set;
|
||||
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.opensearch.testcontainers.OpensearchContainer;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
|
||||
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
import org.springframework.cloud.openfeign.EnableFeignClients;
|
||||
import org.springframework.context.ApplicationContextInitializer;
|
||||
import org.springframework.context.ConfigurableApplicationContext;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.ComponentScan;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.FilterType;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.testcontainers.utility.DockerImageName;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.server.Application;
|
||||
import com.iqser.red.storage.commons.StorageAutoConfiguration;
|
||||
import com.iqser.red.storage.commons.service.StorageService;
|
||||
import com.iqser.red.storage.commons.utils.FileSystemBackedStorageService;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
import com.knecon.fforesight.tenantcommons.TenantsClient;
|
||||
import com.knecon.fforesight.tenantcommons.model.SearchConnection;
|
||||
import com.knecon.fforesight.tenantcommons.model.TenantResponse;
|
||||
|
||||
@ComponentScan
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@SpringBootTest(classes = Application.class, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractOpensearchIntegrationTest.WAIT_FOR_WRITE_REQUESTS, AbstractOpensearchIntegrationTest.SEARCH_BACKEND})
|
||||
@ContextConfiguration(initializers = {AbstractOpensearchIntegrationTest.Initializer.class})
|
||||
@EnableFeignClients(basePackageClasses = AbstractOpensearchIntegrationTest.TestConfiguration.class)
|
||||
@DirtiesContext
|
||||
@SuppressWarnings("PMD")
|
||||
public abstract class AbstractOpensearchIntegrationTest {
|
||||
|
||||
public static final String WAIT_FOR_WRITE_REQUESTS = "elasticsearch.refreshPolicy=wait_for";
|
||||
public static final String SEARCH_BACKEND = "search.backend=opensearch";
|
||||
|
||||
@MockBean
|
||||
private TenantsClient tenantsClient;
|
||||
|
||||
private static int port;
|
||||
|
||||
|
||||
@BeforeEach
|
||||
public void setupOptimize() {
|
||||
|
||||
TenantContext.setTenantId("redaction");
|
||||
when(tenantsClient.getTenant("redaction")).thenReturn(TenantResponse.builder()
|
||||
.searchConnection(SearchConnection.builder()
|
||||
.hosts(Set.of("localhost"))
|
||||
.port(port)
|
||||
.scheme("http")
|
||||
.numberOfShards("1")
|
||||
.numberOfReplicas("5")
|
||||
.indexPrefix("indexprefix")
|
||||
.build())
|
||||
.build());
|
||||
}
|
||||
|
||||
|
||||
static class Initializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {
|
||||
|
||||
public void initialize(ConfigurableApplicationContext configurableApplicationContext) {
|
||||
|
||||
var esContainer = new OpensearchContainer(DockerImageName.parse("opensearchproject/opensearch:2.6.0"));
|
||||
|
||||
esContainer.start();
|
||||
|
||||
String esHost = esContainer.getHttpHostAddress();
|
||||
port = Integer.parseInt(esHost.substring(esHost.lastIndexOf(':') + 1));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Autowired
|
||||
protected StorageService storageService;
|
||||
|
||||
@Configuration
|
||||
@EnableAutoConfiguration(exclude = {RabbitAutoConfiguration.class})
|
||||
@ComponentScan(excludeFilters = {@ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, value = StorageAutoConfiguration.class)})
|
||||
public static class TestConfiguration {
|
||||
|
||||
@Bean
|
||||
@Primary
|
||||
public StorageService inmemoryStorage(ObjectMapper objectMapper) {
|
||||
|
||||
return new FileSystemBackedStorageService(objectMapper);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@ -1,53 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.springframework.core.io.InputStreamResource;
|
||||
|
||||
import com.iqser.red.storage.commons.exception.StorageObjectDoesNotExist;
|
||||
import com.iqser.red.storage.commons.service.StorageService;
|
||||
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
public class FileSystemBackedStorageService extends StorageService {
|
||||
|
||||
private final Map<String, File> dataMap = new HashMap<>();
|
||||
|
||||
public FileSystemBackedStorageService() {
|
||||
super(null, null);
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public InputStreamResource getObject(String objectId) {
|
||||
|
||||
var res = dataMap.get(objectId);
|
||||
if (res == null) {
|
||||
throw new StorageObjectDoesNotExist(new RuntimeException());
|
||||
}
|
||||
return new InputStreamResource(new FileInputStream(res));
|
||||
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
@Override
|
||||
public void storeObject(String objectId, byte[] data) {
|
||||
File tempFile = File.createTempFile("test", ".tmp");
|
||||
|
||||
IOUtils.write(data, new FileOutputStream(tempFile));
|
||||
|
||||
dataMap.put(objectId, tempFile);
|
||||
}
|
||||
|
||||
public void clearStorage() {
|
||||
this.dataMap.forEach((k, v) -> {
|
||||
v.delete();
|
||||
});
|
||||
this.dataMap.clear();
|
||||
}
|
||||
}
|
||||
@ -1,78 +0,0 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static org.assertj.core.api.Assertions.assertThat;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.commons.compress.utils.Lists;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
import org.springframework.test.context.junit4.SpringRunner;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.search.v1.model.SearchResult;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
|
||||
@RunWith(SpringRunner.class)
|
||||
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT, properties = {AbstractElasticsearchIntegrationTest.WAIT_FOR_WRITE_REQUESTS})
|
||||
public class IndexCreatorTest extends AbstractElasticsearchIntegrationTest {
|
||||
|
||||
@Autowired
|
||||
private ObjectMapper objectMapper;
|
||||
|
||||
@Autowired
|
||||
private DocumentIndexService documentIndexService;
|
||||
|
||||
@Autowired
|
||||
private SearchService searchService;
|
||||
|
||||
@Autowired
|
||||
private DocumentDeleteService documentDeleteService;
|
||||
|
||||
@MockBean
|
||||
private FileStatusClient fileStatusClient;
|
||||
|
||||
@MockBean
|
||||
private FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
|
||||
|
||||
|
||||
@Test
|
||||
public void test() throws IOException {
|
||||
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
|
||||
ClassPathResource textResource2 = new ClassPathResource("files/Text2.json");
|
||||
Text text2 = objectMapper.readValue(textResource2.getInputStream(), Text.class);
|
||||
|
||||
documentIndexService.indexDocument("template1", "dossierId", "fileId", "Single Study - Oral (Gavage) Mouse.pdf", text);
|
||||
documentIndexService.indexDocument("template2", "dossierId2", "fileId2", "S-Metolachlor_RAR_01_Volume_1_2018-09-06.pdf", text2);
|
||||
|
||||
SearchResult result = searchService.search("hans klaus single", null, null, null, 1, 10, true);
|
||||
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(2);
|
||||
|
||||
documentDeleteService.deleteDocument("fileId");
|
||||
|
||||
result = searchService.search("hans klaus single", null, Arrays.asList("dossierId", "dossierId2"), null, 1, 10, true);
|
||||
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
|
||||
|
||||
result = searchService.search("hans klaus single", null, Arrays.asList("dossierId3", "dossierId4"), null, 1, 10, true);
|
||||
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(0);
|
||||
|
||||
result = searchService.search("hans klaus single", Arrays.asList("template1", "template2"), null, null, 1, 10, true);
|
||||
|
||||
assertThat(result.getMatchedDocuments().size()).isEqualTo(1);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,80 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import org.junit.jupiter.api.Assertions;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.index.IndexInformation;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
|
||||
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
|
||||
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
|
||||
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
public class IndexTest extends AbstractElasticsearchIntegrationTest {
|
||||
|
||||
@Autowired
|
||||
private IndexInformationService indexInformationService;
|
||||
|
||||
@MockBean
|
||||
private IndexInformationClient indexInformationClient;
|
||||
|
||||
@MockBean
|
||||
private FileStatusClient fileStatusClient;
|
||||
|
||||
@MockBean
|
||||
private IndexingMessageReceiver indexingMessageReceiver;
|
||||
|
||||
|
||||
@Test
|
||||
@SneakyThrows
|
||||
public void testGenerateHash() {
|
||||
// Act
|
||||
String hash = indexInformationService.generateIndexConfigurationHash();
|
||||
|
||||
// Assert
|
||||
System.out.println(hash);
|
||||
Assertions.assertNotNull(hash);
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@SneakyThrows
|
||||
public void testHashChanged() {
|
||||
// Arrange
|
||||
IndexInformation indexInformation = IndexInformation.builder().indexConfigurationHash("Some Hash").build();
|
||||
when(indexInformationClient.getIndexInformation()).thenReturn(indexInformation);
|
||||
|
||||
// Act and Assert
|
||||
Assertions.assertTrue(indexInformationService.hasIndexChanged());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@SneakyThrows
|
||||
public void testHashChangedNot() {
|
||||
// Arrange
|
||||
String hash = indexInformationService.generateIndexConfigurationHash();
|
||||
IndexInformation indexInformation = IndexInformation.builder().indexConfigurationHash(hash).build();
|
||||
when(indexInformationClient.getIndexInformation()).thenReturn(indexInformation);
|
||||
|
||||
// Act and Assert
|
||||
Assertions.assertFalse(indexInformationService.hasIndexChanged());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@SneakyThrows
|
||||
public void testHashDoesNotExist() {
|
||||
// Arrange
|
||||
when(indexInformationClient.getIndexInformation()).thenReturn(null);
|
||||
|
||||
// Act and Assert
|
||||
Assertions.assertTrue(indexInformationService.hasIndexChanged());
|
||||
}
|
||||
|
||||
}
|
||||
@ -0,0 +1,93 @@
|
||||
package com.iqser.red.service.search.v1.server.service;
|
||||
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.amqp.rabbit.core.RabbitAdmin;
|
||||
import org.springframework.amqp.rabbit.core.RabbitTemplate;
|
||||
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
import org.springframework.core.io.ClassPathResource;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.Dossier;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileModel;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.FileType;
|
||||
import com.iqser.red.service.persistence.service.v1.api.shared.model.dossiertemplate.dossier.file.WorkflowStatus;
|
||||
import com.iqser.red.service.search.v1.model.IndexMessage;
|
||||
import com.iqser.red.service.search.v1.model.IndexMessageType;
|
||||
import com.iqser.red.service.search.v1.model.SearchRequest;
|
||||
import com.iqser.red.service.search.v1.server.client.DossierClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusClient;
|
||||
import com.iqser.red.service.search.v1.server.client.FileStatusProcessingUpdateClient;
|
||||
import com.iqser.red.service.search.v1.server.client.IndexInformationClient;
|
||||
import com.iqser.red.service.search.v1.server.controller.SearchController;
|
||||
import com.iqser.red.service.search.v1.server.model.Text;
|
||||
import com.iqser.red.service.search.v1.server.queue.IndexingMessageReceiver;
|
||||
import com.iqser.red.service.search.v1.server.service.utils.MetricValidationUtils;
|
||||
import com.knecon.fforesight.tenantcommons.TenantContext;
|
||||
|
||||
import io.micrometer.prometheus.PrometheusMeterRegistry;
|
||||
import lombok.SneakyThrows;
|
||||
|
||||
public class MetricsIntegrationTest extends AbstractElasticsearchIntegrationTest {
|
||||
|
||||
@Autowired
|
||||
private ObjectMapper objectMapper;
|
||||
@Autowired
|
||||
private IndexingMessageReceiver indexingMessageReceiver;
|
||||
@Autowired
|
||||
private PrometheusMeterRegistry prometheusMeterRegistry;
|
||||
@Autowired
|
||||
private SearchController searchController;
|
||||
@MockBean
|
||||
private FileStatusClient fileStatusClient;
|
||||
@MockBean
|
||||
private DossierClient dossierClient;
|
||||
@MockBean
|
||||
private FileStatusProcessingUpdateClient fileStatusProcessingUpdateClient;
|
||||
@MockBean
|
||||
private IndexInformationClient indexInformationClient;
|
||||
@MockBean
|
||||
private RabbitTemplate rabbitTemplate;
|
||||
|
||||
|
||||
@Test
|
||||
@SneakyThrows
|
||||
public void testMetrics() {
|
||||
|
||||
var dossier = new Dossier();
|
||||
dossier.setId("1");
|
||||
when(dossierClient.getDossierById("1", true, true)).thenReturn(dossier);
|
||||
|
||||
var file = new FileModel();
|
||||
file.setDossierId("1");
|
||||
file.setId("1");
|
||||
file.setWorkflowStatus(WorkflowStatus.NEW);
|
||||
when(fileStatusClient.getFileStatus("1", "1")).thenReturn(file);
|
||||
|
||||
ClassPathResource textResource = new ClassPathResource("files/Text2.json");
|
||||
Text text = objectMapper.readValue(textResource.getInputStream(), Text.class);
|
||||
storageService.storeJSONObject(TenantContext.getTenantId(), TextStorageService.StorageIdUtils.getStorageId("1", "1", FileType.SIMPLIFIED_TEXT), text);
|
||||
|
||||
IndexMessage indexRequest = new IndexMessage();
|
||||
indexRequest.setDossierId("1");
|
||||
indexRequest.setFileId("1");
|
||||
indexRequest.setDossierTemplateId("1");
|
||||
indexRequest.setMessageType(IndexMessageType.INSERT);
|
||||
indexingMessageReceiver.receiveIndexingRequest(indexRequest);
|
||||
|
||||
indexRequest.setMessageType(IndexMessageType.UPDATE);
|
||||
indexingMessageReceiver.receiveIndexingRequest(indexRequest);
|
||||
|
||||
searchController.getDossierStatus(SearchRequest.builder().queryString("test").build());
|
||||
|
||||
MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_indexDocument", 1, null);
|
||||
MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_updateDocument", 1, null);
|
||||
MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_getTextSearchService", 1, null);
|
||||
MetricValidationUtils.validateMetric(prometheusMeterRegistry, "redactmanager_search", 1, null);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user