diff --git a/.github/workflows/build-workflow.yml b/.github/workflows/build-workflow.yml
new file mode 100644
index 00000000..7a584803
--- /dev/null
+++ b/.github/workflows/build-workflow.yml
@@ -0,0 +1,27 @@
+name: Build and test
+
+on: [ push ]
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ container:
+ # TODO https://github.com/opendevstack/ods-quickstarters/issues/766
+ image: s2obcn/jdk-11_openj9-wkhtmltopdf-ubi:main
+ env:
+ WKHTML_TO_PDF_WITH_DOCKER: false
+ volumes:
+ - /var/run/docker.sock:/var/run/docker.sock
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Build with Gradle
+ run: ./gradlew clean test --stacktrace --no-daemon
+
+ - name: TestReport
+ uses: actions/upload-artifact@v3
+ if: ${{ always() }}
+ with:
+ name: JUnit Report
+ path: build/reports/**/**
diff --git a/.github/workflows/changelog-enforcer.yml b/.github/workflows/changelog-enforcer.yml
index 4ce32064..43376980 100644
--- a/.github/workflows/changelog-enforcer.yml
+++ b/.github/workflows/changelog-enforcer.yml
@@ -8,7 +8,7 @@ jobs:
changelog:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v3
- uses: dangoslen/changelog-enforcer@v2
with:
changeLogPath: 'CHANGELOG.md'
diff --git a/.github/workflows/continuous-integration-workflow.yml b/.github/workflows/continuous-integration-workflow.yml
deleted file mode 100644
index 49c537fd..00000000
--- a/.github/workflows/continuous-integration-workflow.yml
+++ /dev/null
@@ -1,67 +0,0 @@
-name: Document Generation Service
-
-on: [ push, pull_request ]
-
-jobs:
- build:
-
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v1
- - name: Set up JDK 11
- uses: actions/setup-java@v1
- with:
- java-version: 11
- - name: download and install wkhtml
- run: |
- sudo apt install curl
- sudo curl -kLO https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.4/wkhtmltox-0.12.4_linux-generic-amd64.tar.xz
- sudo tar vxf wkhtmltox-0.12.4_linux-generic-amd64.tar.xz
- sudo mv wkhtmltox/bin/wkhtmlto* /usr/bin
- - name: Build with Gradle
- run: ./gradlew clean test shadowJar --stacktrace --no-daemon
- env:
- NO_NEXUS: true
- - uses: actions/cache@v1
- with:
- path: ~/.gradle/caches
- key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
- restore-keys: |
- ${{ runner.os }}-gradle-
- - name: TestReport
- uses: actions/upload-artifact@v2
- if: ${{ always() }}
- with:
- name: JUnit Report
- path: build/reports/tests/test/**
- - name: copy created artifacts into docker context
- run: |
- cp build/libs/*-all.jar ./docker/app.jar
- - name: Build docker image
- if: success()
- run: |
- COMMIT_AUTHOR=$(git --no-pager show -s --format='%an (%ae)' $GITHUB_SHA)
- COMMIT_MESSAGE=$(git log -1 --pretty=%B $GITHUB_SHA)
- COMMIT_TIME=$(git show -s --format=%ci $GITHUB_SHA)
- BUILD_TIME=$(date -u "+%Y-%m-%d %H:%M:%S %z")
- docker build \
- --label "ods.build.job.url=https://github.com/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" \
- --label "ods.build.source.repo.ref=$GITHUB_REF" \
- --label "ods.build.source.repo.commit.author=$COMMIT_AUTHOR" \
- --label "ods.build.source.repo.commit.msg=$COMMIT_MESSAGE" \
- --label "ods.build.source.repo.commit.sha=$GITHUB_SHA" \
- --label "ods.build.source.repo.commit.timestamp=$COMMIT_TIME" \
- --label "ods.build.source.repo.url=https://github.com/$GITHUB_REPOSITORY.git" \
- --label "ods.build.timestamp=$BUILD_TIME" \
- -t ods-document-generation-svc:local .
- docker inspect ods-document-generation-svc:local --format='{{.Config.Labels}}'
- working-directory: docker
- - name: Push docker image
- if: success() && github.repository == 'opendevstack/ods-document-generation-svc' && github.event_name == 'push'
- shell: bash
- env:
- DOCKER_USER: ${{ secrets.DockerHubUser }}
- DOCKER_PASS: ${{ secrets.DockerHubPass }}
- run: ./.github/workflows/push-image.sh ${{ github.ref }} "$DOCKER_USER" "$DOCKER_PASS"
-
diff --git a/.github/workflows/e2e-tests-workflow.yml b/.github/workflows/e2e-tests-workflow.yml
new file mode 100644
index 00000000..0eca0d18
--- /dev/null
+++ b/.github/workflows/e2e-tests-workflow.yml
@@ -0,0 +1,36 @@
+name: e2e and performance Test
+
+on: [ pull_request ]
+
+jobs:
+ e2eTest:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Docker test
+ run: ./gradlew dockerTest --stacktrace --no-daemon
+
+ - name: TestReport
+ uses: actions/upload-artifact@v3
+ if: ${{ always() }}
+ with:
+ name: JUnit e2e Report
+ path: build/reports/**/**
+
+ performanceTest:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v3
+
+ - name: Performance test
+ run: ./gradlew gatlingRun --stacktrace --no-daemon
+
+ - name: TestReport
+ uses: actions/upload-artifact@v3
+ if: ${{ always() }}
+ with:
+ name: JUnit performance test Report
+ path: build/reports/**/**
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9b483b13..208dfb11 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,19 +1,31 @@
# Changelog
+- Temporary change to add LevaDoc 3.
+
+
## Unreleased
- Fix TIR and DTR documents are not properly indexed ([#55](https://github.com/opendevstack/ods-document-generation-svc/pull/55))
- Fix wkhtmltox hangs ([#66](https://github.com/opendevstack/ods-document-generation-svc/pull/66))
- Improve memory management and error handling ([#70](https://github.com/opendevstack/ods-document-generation-svc/pull/70))
-- ODS AMI build failing due to broken x11 fonts package installation ([#74](https://github.com/opendevstack/ods-document-generation-svc/pull/74))
+- Use Markdown Architectural Decision Records https://adr.github.io/madr/
+- Improve maintainability by adding SpringBoot framework
+- Added IT (Docker tests)
+- Added performance tests
+- logback.xml can be overridden from command line
+- removed unused params from payloads.
+- Renamed param jekins to jenkins
+- Removed byte[] use
+- Set test summary for empty description in TCR ([#837](https://github.com/opendevstack/ods-jenkins-shared-library/pull/837))
+- Set test summary for empty description in TCR for acceptance tests ([#844](https://github.com/opendevstack/ods-jenkins-shared-library/pull/844))
+
+## [4.0] - 2021-11-18
+
+### Added
+- Added log to print /document endpoint input
### Fixed
- Github template tests fail in proxy environment ([#56](https://github.com/opendevstack/ods-document-generation-svc/issues/56))
--
-
-## [4.0] - 2021-15-11
-
-### Added
-- Added log to print /document endpoint input
+- Fix TIR and DTR documents are not properly indexed ([#55](https://github.com/opendevstack/ods-document-generation-svc/pull/55))
### Changed
- Updated maxRequestSize value from 100m to 200m
diff --git a/Jenkinsfile b/Jenkinsfile
deleted file mode 100644
index a4fa2bf5..00000000
--- a/Jenkinsfile
+++ /dev/null
@@ -1,71 +0,0 @@
-def odsNamespace
-def odsGitRef
-def odsImageTag
-node {
- odsNamespace = env.ODS_NAMESPACE ?: 'ods'
- odsImageTag = env.ODS_IMAGE_TAG ?: 'latest'
- odsGitRef = env.ODS_GIT_REF ?: 'master'
-}
-
-library("ods-jenkins-shared-library@${odsGitRef}")
-
-odsComponentPipeline(
- imageStreamTag: "${odsNamespace}/jenkins-agent-maven:${odsImageTag}",
- branchToEnvironmentMapping: [:],
- debug: true,
- resourceRequestMemory: '3Gi',
- resourceLimitMemory: '3Gi'
-) { context ->
- stageBuild(context)
- odsComponentStageScanWithSonar(context, [branch: '*'])
- odsComponentStageBuildOpenShiftImage(context, [branch: '*'])
- stageCreatedImageTagLatest(context)
-}
-
-def stageBuild(def context) {
- def javaOpts = "-Xmx512m"
- def gradleTestOpts = "-Xmx128m"
-
- stage('Build and Unit Test') {
- withEnv(["TAGVERSION=${context.tagversion}", "NEXUS_HOST=${context.nexusHost}", "NEXUS_USERNAME=${context.nexusUsername}", "NEXUS_PASSWORD=${context.nexusPassword}", "JAVA_OPTS=${javaOpts}", "GRADLE_TEST_OPTS=${gradleTestOpts}"]) {
-
- // get wkhtml
- sh (
- script : """
- curl -kLO https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.4/wkhtmltox-0.12.4_linux-generic-amd64.tar.xz
- tar vxf wkhtmltox-0.12.4_linux-generic-amd64.tar.xz
- mv wkhtmltox/bin/wkhtmlto* /usr/bin
- """,
- label : "get and install wkhtml"
- )
-
- def status = sh(
- script: "./gradlew clean test shadowJar --stacktrace --no-daemon",
- label : "gradle build",
- returnStatus: true
- )
- if (status != 0) {
- error "Build failed!"
- }
-
- status = sh(
- script: "cp build/libs/*-all.jar ./docker/app.jar",
- label : "copy resources into docker context",
- returnStatus: true
- )
- if (status != 0) {
- error "Copying failed!"
- }
- }
- }
-}
-
-def stageCreatedImageTagLatest(def context) {
- stage('Tag created image') {
- def targetImageTag = context.gitBranch.replace('/','_').replace('-','_')
- sh(
- script: "oc -n ${context.cdProject} tag ${context.componentId}:${context.shortGitCommit} ${context.componentId}:${targetImageTag}",
- label: "Set tag '${targetImageTag}' on is/${context.componentId}"
- )
- }
-}
diff --git a/README.md b/README.md
index 007307bf..0ad987b2 100644
--- a/README.md
+++ b/README.md
@@ -2,44 +2,61 @@
# ODS Document Generation Service
-A document generation service that transforms document templates in a remote Bitbucket repository into PDF documents.
+A document generation service that:
+- Transforms document templates in a remote Bitbucket repository into PDF documents.
+- Generates LeVA documentation and uploads it to a Nexus service
## Distribute
+In order to generate a distributed app a Docker client needs to be configured.
+- The _gradle_ task: __dockerBuildImage__ generates a Docker image with the application
+
+### Environment
+The file src/main/resources/application.yml has the properties and parameters that the app admits.
+
+## Unit Tests
+In order to execute the tests a Docker client needs to be configured.
```
-make shadowJar
+gradle test
```
-## Run
+## Integration Tests
+In order to execute the tests a Docker client needs to be configured.
```
-make run
+gradle dockerTest
```
-## Test
+## Performance Tests
+In order to execute the tests a Docker client needs to be configured.
```
-make test
+gradle gatlingRun
```
## Document Templates
-
When processing a template `type` at a specific `version`, and data into a document, the DocGen service expects the BitBucket repository to have a `release/${version}` branch that contains the template type at `/templates/${type}.html.tmpl`.
-## Requirements
+## LeVA Doc generation
+When generating LeVA documentation, the app expects the XML test results and the Jenkins logs to be in a Nexus service.
-### Packages
+## Package structure
+Main Groovy code lives in src/main/groovy:
+- org.ods.doc.gen.core: common 'utilities'
+- org.ods.doc.gen.adapters: clients to other systems. They translate the interfaces of external systems to the interfaces required.
+- org.ods.doc.gen.doc: LeVA document generation functional module
+- org.ods.doc.pdf.builder: PDF generation document functional module
-- [wkhtmltopdf](https://wkhtmltopdf.org/)
-### Environment
+## History
+The module __"leva.doc"__ is a refactor extracted from the LevaDoc functionality of the [shared-library](https://github.com/opendevstack/ods-jenkins-shared-library).
+You can see the shared-library before the refactor here: https://github.com/opendevstack/ods-jenkins-shared-library/tree/feature/beforeMoveLevaDoc
-- `BITBUCKET_URL`
-- `BITBUCKET_USERNAME`
-- `BITBUCKET_PASSWORD`
-- `BITBUCKET_DOCUMENT_TEMPLATES_PROJECT`
-- `BITBUCKET_DOCUMENT_TEMPLATES_REPO`
+Some correspondences between the shared-lib classes and this project:
+- org.ods.orchestration.util.Project => org.ods.doc.gen.project.data.ProjectData
+- org.ods.orchestration.usecase.LeVADocumentUseCase => org.ods.doc.gen.leva.doc.services.LeVADocumentService
+- org.ods.orchestration.usecase.DocGenUseCase => org.ods.doc.gen.leva.doc.services.DocGenService
+- org.ods.orchestration.util.DocumentHistory => org.ods.doc.gen.leva.doc.services.DocumentHistory
+- org.ods.orchestration.usecase.PDFUtil => org.ods.doc.gen.leva.doc.services.PDFService
-## Reference
-This project is based on https://github.com/jooby-project/gradle-starter.
diff --git a/build.gradle b/build.gradle
index c8ec71f2..4cba513c 100644
--- a/build.gradle
+++ b/build.gradle
@@ -1,180 +1,293 @@
buildscript {
ext {
- nexus_url = "${project.findProperty('nexus_url') ?: System.getenv('NEXUS_HOST')}"
- nexus_user = "${project.findProperty('nexus_user') ?: System.getenv('NEXUS_USERNAME')}"
- nexus_pw = "${project.findProperty('nexus_pw') ?: System.getenv('NEXUS_PASSWORD')}"
- no_nexus = (project.findProperty('no_nexus') ?: System.getenv('NO_NEXUS') ?: false).toBoolean()
- if (!no_nexus && (nexus_url == "null" || nexus_user == "null" || nexus_pw == "null")) {
- throw new GradleException("property no_nexus='false' (or not defined) but at least one of the properties nexus_url, nexus_user or nexus_pw is not configured. Please configure those properties!")
- }
-
- def folderRel = (String)("${project.findProperty('nexus_folder_releases') ?: System.getenv('NEXUS_FOLDER_RELEASES')}")
- nexusFolderReleases = folderRel == "null" ? "maven-releases" : folderRel
-
- def folderSnaps = (String)("${project.findProperty('nexus_folder_snapshots') ?: System.getenv('NEXUS_FOLDER_SNAPSHOTS')}")
- nexusFolderSnapshots = folderSnaps == "null" ? "maven-snapshots" : folderSnaps
-
- snippetsDir = file('build/generated-snippets')
+ groovyallVersion = '3.0.10'
+ springbootVersion = "2.6.6"
+ springCloudVersion = '3.1.1'
+ spockCoreVersion = "2.1-groovy-3.0"
+ spockReportsVersion = '2.3.0-groovy-3.0'
}
repositories {
- if (no_nexus) {
- println("using repositories 'jcenter' and 'mavenCentral', because property no_nexus=$no_nexus")
- jcenter()
- mavenCentral()
- } else {
- println("using nexus repositories")
- maven() {
- url "${nexus_url}/repository/jcenter/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
- }
-
- maven() {
- url "${nexus_url}/repository/maven-public/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
+ println("using repositories 'jcenter' and 'mavenCentral'")
+ mavenLocal()
+ jcenter()
+ mavenCentral()
+ maven {
+ url "https://repo1.maven.org/maven2/"
+ mavenContent {
+ releasesOnly()
}
-
- maven() {
- url "${nexus_url}/repository/atlassian_public/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
+ }
+ maven {
+ url "https://repo.jenkins-ci.org/releases/"
+ mavenContent {
+ releasesOnly()
}
}
}
}
-buildscript {
- ext {
- joobyVersion = "1.6.6"
- }
-
- dependencies {
- classpath "org.jooby:jooby-gradle-plugin:$joobyVersion"
- }
-}
-
plugins {
- id "com.github.johnrengelman.shadow"
- id "com.google.osdetector" version "1.6.2"
- id "io.spring.dependency-management" version "1.0.8.RELEASE"
+ id "groovy"
+ id 'com.adarshr.test-logger' version '3.2.0'
+ id 'jacoco'
+ id 'org.springframework.boot' version "${springbootVersion}"
+ id 'com.bmuschko.docker-spring-boot-application' version '7.3.0'
+ id "io.gatling.gradle" version "3.7.6.1"
}
-
-apply plugin: "application"
-apply plugin: "com.github.johnrengelman.shadow"
-apply plugin: "groovy"
-apply plugin: "jooby"
-apply plugin: "jacoco"
+// related to gatling and Springboot
+ext['netty.version'] = '4.0.51.Final'
repositories {
- if (no_nexus) {
- println("using repositories 'jcenter' and 'mavenCentral', because property no_nexus=$no_nexus")
- jcenter()
- mavenCentral()
- } else {
- println("using nexus repositories")
- maven() {
- url "${nexus_url}/repository/jcenter/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
- }
+    // Single declaration of Maven Central is sufficient
+    mavenCentral()
+    println("using repository 'mavenCentral'")
+}
- maven() {
- url "${nexus_url}/repository/maven-public/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
- }
+group = 'org.ods'
+version = '1.0'
+ext {
+ // TODO https://github.com/opendevstack/ods-quickstarters/issues/766
+ imageBaseForApp ="s2obcn/jdk-11_openj9-wkhtmltopdf-ubi:main"
+ imageForTest = "jdk-11_openj9-wkhtmltopdf-ubi:local"
+}
- maven() {
- url "${nexus_url}/repository/atlassian_public/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
- }
+java {
+ toolchain {
+ languageVersion.set(JavaLanguageVersion.of(11))
}
}
-group = 'org.ods'
-version = '0.1'
-mainClassName = "app.App"
-sourceCompatibility = 1.8
+compileGroovy {
+ groovyOptions.javaAnnotationProcessing = true
+}
-dependencyManagement {
- imports {
- mavenBom "org.jooby:jooby-bom:$joobyVersion"
+configurations.all {
+ resolutionStrategy {
+ force (group: 'org.codehaus.groovy', name: 'groovy-all', version: groovyallVersion)
}
}
dependencies {
- implementation "com.github.ben-manes.caffeine:caffeine:2.7.0"
- implementation "com.github.jknack:handlebars:4.1.2"
- implementation "commons-io:commons-io:2.11.0"
- implementation "io.github.openfeign:feign-core:10.2.3"
- implementation "io.github.openfeign:feign-gson:10.2.3"
- implementation "io.github.openfeign:feign-okhttp:10.2.3"
- implementation "net.lingala.zip4j:zip4j:1.3.3"
- implementation "org.apache.httpcomponents:httpclient:4.5.8"
- implementation "org.codehaus.groovy:groovy-all:2.5.7"
- implementation "org.jooby:jooby-jackson"
- implementation "org.jooby:jooby-netty"
-
- implementation "io.netty:netty-transport-native-epoll:${dependencyManagement.importedProperties['netty.version']}:${osdetector.classifier.contains('linux') ? 'linux-x86_64' : ''}"
- implementation "io.netty:netty-tcnative-boringssl-static:${dependencyManagement.importedProperties['boringssl.version']}"
-
- implementation "org.apache.pdfbox:pdfbox:2.0.24"
-
- testImplementation "junit:junit:4.12"
- testImplementation "com.github.stefanbirkner:system-rules:1.19.0" // for managing environment variables
- testImplementation "com.github.tomakehurst:wiremock:2.23.2" // for mocking HTTP server reponses
- testImplementation "io.rest-assured:rest-assured:4.0.0" // for validating REST services
- testImplementation "org.spockframework:spock-core:1.3-groovy-2.5"
+ implementation('org.codehaus.groovy:groovy-json:3.0.10')
- testImplementation "cglib:cglib-nodep:3.3.0" // for mocking classes
- testImplementation "org.objenesis:objenesis:3.1"
-}
+ implementation (group: 'org.codehaus.groovy', name: 'groovy-all', version: groovyallVersion){
+ exclude group: "org.codehaus.groovy", module: "groovy-test-junit5"
+ }
-import com.github.jengelman.gradle.plugins.shadow.transformers.NewGroovyExtensionModuleTransformer
+ implementation "org.springframework.boot:spring-boot-starter:${springbootVersion}"
+ implementation "org.springframework.boot:spring-boot-starter-web:${springbootVersion}"
+ implementation "org.springframework.boot:spring-boot-starter-cache:${springbootVersion}"
+ implementation "org.springframework.cloud:spring-cloud-starter-openfeign:${springCloudVersion}"
-shadowJar {
- mergeServiceFiles()
- mergeGroovyExtensionModules()
- transform(NewGroovyExtensionModuleTransformer)
-}
+ implementation("javax.inject:javax.inject:1")
+ implementation("javax.cache:cache-api:1.1.1")
-test {
- outputs.dir snippetsDir
+ implementation 'com.fasterxml.jackson.core:jackson-databind'
+ implementation 'com.github.ben-manes.caffeine:caffeine:3.0.5'
+ implementation 'com.github.jknack:handlebars:4.3.0'
+ implementation 'commons-io:commons-io:2.11.0'
+ implementation 'io.github.openfeign:feign-core:11.8'
+ implementation 'io.github.openfeign:feign-gson:11.8'
+ implementation 'io.github.openfeign:feign-okhttp:11.8'
+ implementation 'io.github.openfeign:feign-jackson:11.8'
+
+ implementation 'net.lingala.zip4j:zip4j:2.9.1'
+ implementation 'org.apache.httpcomponents:httpclient:4.5.13'
+ implementation 'org.apache.pdfbox:pdfbox:2.0.25'
+
+ implementation('ch.qos.logback:logback-classic:1.2.11')
+ implementation 'org.eclipse.jgit:org.eclipse.jgit:6.0.0.202111291000-r'
+
+ testImplementation "org.junit.jupiter:junit-jupiter-api"
+ testImplementation "org.junit.jupiter:junit-jupiter-engine"
+ testImplementation("uk.org.webcompere:system-stubs-core:1.2.0")
+ testImplementation "org.testcontainers:spock:1.16.3"
+ testImplementation("org.testcontainers:testcontainers:1.16.3")
+
+ testImplementation("org.spockframework:spock-core:${spockCoreVersion}")
+ testImplementation ("com.athaydes:spock-reports:$spockReportsVersion"){ transitive = false }
+ testImplementation "org.spockframework:spock-spring:${spockCoreVersion}"
+ testImplementation "org.springframework.boot:spring-boot-starter-test:${springbootVersion}"
+
+ testImplementation "com.github.stefanbirkner:system-rules:1.19.0"
+ testImplementation "com.github.tomakehurst:wiremock-jre8:2.32.0"
+ testImplementation 'io.rest-assured:rest-assured:4.5.1'
+ testImplementation("de.redsix:pdfcompare:1.1.61")
+
+ gatlingImplementation 'org.awaitility:awaitility:4.2.0'
+ gatlingImplementation 'io.rest-assured:rest-assured:4.5.1'
+
+ // From shared lib
+ implementation('com.konghq:unirest-java:3.13.6')
+ implementation("org.yaml:snakeyaml:1.30")
+ implementation('com.vladsch.flexmark:flexmark-all:0.64.0')
+ implementation('fr.opensagres.xdocreport:fr.opensagres.poi.xwpf.converter.core:2.0.3')
+ implementation('fr.opensagres.xdocreport:fr.opensagres.poi.xwpf.converter.pdf:2.0.3')
+ implementation("org.apache.poi:poi-ooxml:4.1.2")
+ implementation("org.apache.pdfbox:pdfbox:2.0.25")
+ implementation("net.lingala.zip4j:zip4j:2.9.1")
+ implementation("com.xlson.groovycsv:groovycsv:1.3")
- // Use overrides in conf/application.test.conf in ConfigFactory.load()
- // TODO: setting application.env should be enough, but apparently isn't
- systemProperty "application.env", "test"
- systemProperty "config.resource", "application.test.conf"
+ testImplementation group: 'org.assertj', name: 'assertj-core', version: '3.21.0'
+ testImplementation group: 'commons-io', name: 'commons-io', version: '2.8.0'
+ testImplementation "com.github.stefanbirkner:system-rules:1.19.0" // for managing environment variables
+ testImplementation 'org.hamcrest:hamcrest:2.2'
+ testImplementation "org.skyscreamer:jsonassert:1.5.0"
+
+ testImplementation("au.com.dius:pact-jvm-provider:4.0.10")
+ testImplementation("au.com.dius:pact-jvm-provider-junit:4.0.10")
+}
+
+test {
testLogging {
showStandardStreams = true
+ exceptionFormat = 'full'
+ }
+ filter {
+ excludeTestsMatching "*IT"
}
+ systemProperty "spock.configuration", "SpockConfig.groovy"
+ systemProperty 'com.athaydes.spockframework.report.outputDir', 'build/reports/spock'
+ systemProperty 'com.athaydes.spockframework.report.projectName', 'docGen'
+ systemProperty 'com.athaydes.spockframework.report.projectVersion', version
+ systemProperty "testRecordMode", project.findProperty('testRecordMode')?: false
+ systemProperty "generateExpectedPdfFiles", project.findProperty('generateExpectedPdfFiles')?: false
+ systemProperty "wiremock.textToReplace", project.findProperty('wiremock.textToReplace')?: ""
- finalizedBy jacocoTestReport
+ systemProperties << propertiesForTest()
+
+ maxHeapSize = "4096m"
+ useJUnitPlatform()
+}
+
+task dockerTest(type: Test) {
+ group("verification")
+ filter {
+ includeTestsMatching "*IT"
+ }
+ systemProperty 'com.athaydes.spockframework.report.outputDir', 'build/reports/spock'
+ useJUnitPlatform()
}
jacocoTestReport {
+ dependsOn test
reports {
xml.enabled true
+ html.enabled true
+ }
+}
+
+jacocoTestCoverageVerification {
+ dependsOn jacocoTestReport
+ violationRules {
+ rule {
+ limit {
+ minimum = 0.7
+ }
+ }
}
}
-/** We diverge from the default resources structure to adopt the Jooby standard: */
-sourceSets.main.resources {
- srcDirs = ["conf", "public"]
+import com.bmuschko.gradle.docker.tasks.image.*
+task buildImageForTest(type: DockerBuildImage) {
+ group = "docker"
+ inputDir = file("docker")
+ images.add(project.ext.imageForTest as String)
+ buildArgs = getProxyMap()
}
+
+def getProxyMap(){
+ def buildArgs = [:]
+ def http_proxy = "${project.findProperty('http_proxy') ?: System.getenv('HTTP_PROXY')}"
+ def https_proxy = "${project.findProperty('https_proxy') ?: System.getenv('HTTPS_PROXY')}"
+ if (http_proxy && http_proxy != "null")
+ buildArgs << ["http_proxy": http_proxy, "https_proxy": https_proxy]
+
+ return buildArgs
+}
+
+docker {
+ springBootApplication {
+ baseImage = project.ext.imageBaseForApp
+ ports = [8080]
+ images = ['ods-document-generation-svc:local']
+ jvmArgs = ["-XX:+UseCompressedOops", "-XX:+UseParallelGC", "-XX:+UseParallelOldGC"]
+ }
+}
+
+test.dependsOn(buildImageForTest)
+dockerBuildImage.dependsOn(bootJar)
+dockerTest.dependsOn(dockerBuildImage)
+dockerTest.mustRunAfter(test)
+
+bootRun {
+ systemProperties << executionProperties()
+}
+
+import com.bmuschko.gradle.docker.tasks.container.*
+
+task createServerForGatling(type: DockerCreateContainer) {
+    dependsOn dockerBuildImage
+    targetImageId dockerBuildImage.getImageId()
+    hostConfig.portBindings = ['1111:8080']
+    hostConfig.autoRemove = true
+}
+
+task startDocGenServerForGatling(type: DockerStartContainer) {
+    group = "docker"
+    dependsOn createServerForGatling
+    targetContainerId createServerForGatling.getContainerId()
+}
+
+task stopDocGenServerForGatling(type: DockerStopContainer) {
+    group = "docker"
+    targetContainerId createServerForGatling.getContainerId()
+}
+
+gatlingRun.group("verification")
+gatlingRun.mustRunAfter(dockerTest)
+gatlingRun.dependsOn(startDocGenServerForGatling)
+gatlingRun.finalizedBy(stopDocGenServerForGatling)
+
+check.dependsOn(jacocoTestCoverageVerification, dockerTest, gatlingRun)
+
+Map executionProperties(){
+ def bitbucket_url = "${project.findProperty('bitbucket_url') ?: System.getenv('BITBUCKET_HOST')}"
+ def bitbucket_username = "${project.findProperty('bitbucket_username') ?: System.getenv('BITBUCKET_USERNAME')}"
+ def bitbucket_password = "${project.findProperty('bitbucket_password') ?: System.getenv('BITBUCKET_PASSWORD')}"
+
+ def jira_url = "${project.findProperty('jira_url') ?: System.getenv('JIRA_HOST')}"
+ def jira_username = "${project.findProperty('jira_username') ?: System.getenv('JIRA_USERNAME')}"
+ def jira_password = "${project.findProperty('jira_password') ?: System.getenv('JIRA_PASSWORD')}"
+
+ def nexus_url = "${project.findProperty('nexus_url') ?: System.getenv('NEXUS_HOST')}"
+ def nexus_username = "${project.findProperty('nexus_username') ?: System.getenv('NEXUS_USERNAME')}"
+ def nexus_password = "${project.findProperty('nexus_password') ?: System.getenv('NEXUS_PASSWORD')}"
+
+ return [
+ "BITBUCKET_URL": bitbucket_url,
+ "BITBUCKET_USERNAME": bitbucket_username,
+ "BITBUCKET_PASSWORD": bitbucket_password,
+
+ "JIRA_URL": jira_url,
+ "JIRA_USERNAME": jira_username,
+ "JIRA_PASSWORD": jira_password,
+
+ "NEXUS_URL": nexus_url,
+ "NEXUS_USERNAME": nexus_username,
+ "NEXUS_PASSWORD": nexus_password
+ ]
+}
+
+Map propertiesForTest(){
+ List urls = ["BITBUCKET_URL", "JIRA_URL", "NEXUS_URL"]
+ Map execProp = executionProperties()
+ urls.each {
+ if (execProp[it] == "null"){
+ execProp[it] = "http://dummy"
+ }
+ }
+ return execProp
+}
\ No newline at end of file
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
deleted file mode 100644
index 026e9fd4..00000000
--- a/buildSrc/build.gradle
+++ /dev/null
@@ -1,58 +0,0 @@
-ext {
- nexus_url = "${project.findProperty('nexus_url') ?: System.getenv('NEXUS_HOST')}"
- nexus_user = "${project.findProperty('nexus_user') ?: System.getenv('NEXUS_USERNAME')}"
- nexus_pw = "${project.findProperty('nexus_pw') ?: System.getenv('NEXUS_PASSWORD')}"
- no_nexus = (project.findProperty('no_nexus') ?: System.getenv('NO_NEXUS') ?: false).toBoolean()
- if (!no_nexus && (nexus_url == "null" || nexus_user == "null" || nexus_pw == "null")) {
- throw new GradleException("property no_nexus='false' (or not defined) but at least one of the properties nexus_url, nexus_user or nexus_pw is not configured. Please configure those properties!")
- }
-
- def folderRel = (String)("${project.findProperty('nexus_folder_releases') ?: System.getenv('NEXUS_FOLDER_RELEASES')}")
- nexusFolderReleases = folderRel == "null" ? "maven-releases" : folderRel
-
- def folderSnaps = (String)("${project.findProperty('nexus_folder_snapshots') ?: System.getenv('NEXUS_FOLDER_SNAPSHOTS')}")
- nexusFolderSnapshots = folderSnaps == "null" ? "maven-snapshots" : folderSnaps
-
- snippetsDir = file('build/generated-snippets')
-}
-
-repositories {
- if (no_nexus) {
- println("using repositories 'jcenter' and 'mavenCentral', because property no_nexus=$no_nexus")
- jcenter()
- mavenCentral()
- } else {
- println("using nexus repositories")
- maven() {
- url "${nexus_url}/repository/jcenter/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
- }
-
- maven() {
- url "${nexus_url}/repository/maven-public/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
- }
-
- maven() {
- url "${nexus_url}/repository/atlassian_public/"
- credentials {
- username = "${nexus_user}"
- password = "${nexus_pw}"
- }
- }
- }
-}
-
-apply plugin: 'groovy'
-
-dependencies {
- implementation gradleApi()
- implementation 'com.github.jengelman.gradle.plugins:shadow:5.2.0'
- implementation "org.codehaus.plexus:plexus-utils:3.0.24"
-}
\ No newline at end of file
diff --git a/buildSrc/src/main/groovy/com/github/jengelman/gradle/plugins/shadow/transformers/NewGroovyExtensionModuleTransformer.groovy b/buildSrc/src/main/groovy/com/github/jengelman/gradle/plugins/shadow/transformers/NewGroovyExtensionModuleTransformer.groovy
deleted file mode 100644
index 523fc90a..00000000
--- a/buildSrc/src/main/groovy/com/github/jengelman/gradle/plugins/shadow/transformers/NewGroovyExtensionModuleTransformer.groovy
+++ /dev/null
@@ -1,113 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.github.jengelman.gradle.plugins.shadow.transformers
-
-import shadow.org.apache.tools.zip.ZipEntry
-import shadow.org.apache.tools.zip.ZipOutputStream
-import org.codehaus.plexus.util.IOUtil
-import org.gradle.api.file.FileTreeElement
-
-/**
- * Modified from https://github.com/johnrengelman/shadow/blob/7.1.1/src/main/groovy/com/github/jengelman/gradle/plugins/shadow/transformers/GroovyExtensionModuleTransformer.groovy
- * ---
- * Modified from eu.appsatori.gradle.fatjar.tasks.PrepareFiles.groovy
- *
- * Resource transformer that merges Groovy extension module descriptor files into a single file. If there are several
- * META-INF/services/org.codehaus.groovy.runtime.ExtensionModule resources spread across many JARs the individual
- * entries will all be merged into a single META-INF/services/org.codehaus.groovy.runtime.ExtensionModule resource
- * packaged into the resultant JAR produced by the shadowing process.
- */
-@CacheableTransformer
-class NewGroovyExtensionModuleTransformer implements Transformer {
-
- private static final GROOVY_EXTENSION_MODULE_DESCRIPTOR_PATH =
- "META-INF/groovy/org.codehaus.groovy.runtime.ExtensionModule"
-
- private static final MODULE_NAME_KEY = 'moduleName'
- private static final MODULE_VERSION_KEY = 'moduleVersion'
- private static final EXTENSION_CLASSES_KEY = 'extensionClasses'
- private static final STATIC_EXTENSION_CLASSES_KEY = 'staticExtensionClasses'
-
- private static final MERGED_MODULE_NAME = 'MergedByShadowJar'
- private static final MERGED_MODULE_VERSION = '1.0.0'
-
- private final Properties module = new Properties()
-
- @Override
- boolean canTransformResource(FileTreeElement element) {
- return element.relativePath.pathString == GROOVY_EXTENSION_MODULE_DESCRIPTOR_PATH
- }
-
- @Override
- void transform(TransformerContext context) {
- def props = new Properties()
- props.load(context.is)
- props.each { String key, String value ->
- switch (key) {
- case MODULE_NAME_KEY:
- handle(key, value) {
- module.setProperty(key, MERGED_MODULE_NAME)
- }
- break
- case MODULE_VERSION_KEY:
- handle(key, value) {
- module.setProperty(key, MERGED_MODULE_VERSION)
- }
- break
- case [EXTENSION_CLASSES_KEY, STATIC_EXTENSION_CLASSES_KEY]:
- handle(key, value) { String existingValue ->
- def newValue = "${existingValue},${value}"
- module.setProperty(key, newValue)
- }
- break
- }
- }
- }
-
- private handle(String key, String value, Closure mergeValue) {
- def existingValue = module.getProperty(key)
- if (existingValue) {
- mergeValue(existingValue)
- } else {
- module.setProperty(key, value)
- }
- }
-
- @Override
- boolean hasTransformedResource() {
- return module.size() > 0
- }
-
- @Override
- void modifyOutputStream(ZipOutputStream os, boolean preserveFileTimestamps) {
- ZipEntry entry = new ZipEntry(GROOVY_EXTENSION_MODULE_DESCRIPTOR_PATH)
- entry.time = TransformerContext.getEntryTimestamp(preserveFileTimestamps, entry.time)
- os.putNextEntry(entry)
- IOUtil.copy(toInputStream(module), os)
- os.closeEntry()
- }
-
- private static InputStream toInputStream(Properties props) {
- def baos = new ByteArrayOutputStream()
- props.store(baos, null)
- return new ByteArrayInputStream(baos.toByteArray())
- }
-
-}
diff --git a/conf/META-INF/groovy/org.codehaus.groovy.runtime.ExtensionModule b/conf/META-INF/groovy/org.codehaus.groovy.runtime.ExtensionModule
deleted file mode 100644
index 7f311fd6..00000000
--- a/conf/META-INF/groovy/org.codehaus.groovy.runtime.ExtensionModule
+++ /dev/null
@@ -1,3 +0,0 @@
-moduleName=Resource Handling Extensions
-moduleVersion=1.0
-extensionClasses=util.Try
\ No newline at end of file
diff --git a/conf/application.conf b/conf/application.conf
deleted file mode 100644
index 0a5afc15..00000000
--- a/conf/application.conf
+++ /dev/null
@@ -1,19 +0,0 @@
-# add or override properties
-# See https://github.com/typesafehub/config/blob/master/HOCON.md for more details
-application {
- port = 8080
-
- documents {
- cache {
- basePath = "/tmp/doc-gen-templates"
- }
- }
-}
-
-server {
- maxRequestSize = 200m
-
- http {
- MaxRequestSize = 200m
- }
-}
diff --git a/conf/application.test.conf b/conf/application.test.conf
deleted file mode 100644
index fe0c7f49..00000000
--- a/conf/application.test.conf
+++ /dev/null
@@ -1,7 +0,0 @@
-# add or override properties
-# See https://github.com/typesafehub/config/blob/master/HOCON.md for more details
-include "application"
-
-application {
- port = 9000
-}
diff --git a/conf/logback.xml b/conf/logback.xml
deleted file mode 100644
index bdb1e0bc..00000000
--- a/conf/logback.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-
-
-
-
- [%d{ISO8601}]-[%thread] %-5level %logger - %msg%n
-
-
-
-
-
-
-
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 0eb59e50..f72f619d 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,29 +1,9 @@
-FROM adoptopenjdk/openjdk8:ubi-jre
+# TODO https://github.com/opendevstack/ods-quickstarters/issues/766
+FROM s2obcn/jdk-11_openj9-wkhtmltopdf-ubi:main
-MAINTAINER martin.etmajer@boehringer-ingelheim.com
+RUN useradd pdfuser
-WORKDIR /app
-COPY app.jar ./app.jar
-COPY entrypoint.sh ./entrypoint.sh
-
-RUN yum update -y && \
- yum install -y libX11 libXext libXrender libjpeg xz xorg-x11-fonts-Type1 git-core
-
-# Install wkhtmltopdf
-COPY yum.repos.d/centos8.repo /etc/yum.repos.d/centos8.repo
-RUN yum install -y xorg-x11-fonts-75dpi && \
- curl -kLO https://github.com/wkhtmltopdf/wkhtmltopdf/releases/download/0.12.5/wkhtmltox-0.12.5-1.centos8.x86_64.rpm && \
- rpm -Uvh wkhtmltox-0.12.5-1.centos8.x86_64.rpm && chmod +x entrypoint.sh
+USER pdfuser
-# See https://docs.openshift.com/container-platform/3.9/creating_images/guidelines.html
-RUN chgrp -R 0 /app && \
- chmod -R g=u /app
-
-USER 1001
-
-EXPOSE 8080
-ENV JAVA_MEM_XMX="512m" \
- JAVA_MEM_XMS="128m" \
- JAVA_OPTS="-XX:+UseCompressedOops -XX:+UseG1GC -XX:+UseStringDeduplication -XX:MaxGCPauseMillis=1000"
+WORKDIR /app
-ENTRYPOINT /app/entrypoint.sh
diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh
deleted file mode 100644
index 68e9cca2..00000000
--- a/docker/entrypoint.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/bash -e
-java $JAVA_OPTS -Xms$JAVA_MEM_XMS -Xmx$JAVA_MEM_XMX -jar app.jar
diff --git a/docs/decisions/0000-use-markdown-architectural-decision-records.md b/docs/decisions/0000-use-markdown-architectural-decision-records.md
new file mode 100644
index 00000000..9ce4bb1b
--- /dev/null
+++ b/docs/decisions/0000-use-markdown-architectural-decision-records.md
@@ -0,0 +1,28 @@
+# Use Markdown Architectural Decision Records
+
+## Context and Problem Statement
+
+We want to record architectural decisions made in this project.
+Which format and structure should these records follow?
+
+## Considered Options
+
+* [MADR](https://adr.github.io/madr/) 2.1.2 The Markdown Architectural Decision Records
+* [Michael Nygard's template](http://thinkrelevance.com/blog/2011/11/15/documenting-architecture-decisions) The first incarnation of the term "ADR"
+* [Sustainable Architectural Decisions](https://www.infoq.com/articles/sustainable-architectural-design-decisions) The Y-Statements
+* Other templates listed at https://github.com/joelparkerhenderson/architecture_decision_record
+* Formless No conventions for file format and structure
+
+## Decision Outcome
+
+Chosen option: "MADR 2.1.2", because
+
+* Implicit assumptions should be made explicit.
+ Design documentation is important to enable people understanding the decisions later on.
+ See also [A rational design process: How and why to fake it](https://doi.org/10.1109/TSE.1986.6312940).
+* The MADR format is lean and fits our development style.
+* The MADR structure is comprehensible and facilitates usage & maintenance.
+* The MADR project is vivid.
+* Version 2.1.2 is the latest one available when starting to document ADRs.
+
+
\ No newline at end of file
diff --git a/docs/decisions/0001-use-spring-framework.md b/docs/decisions/0001-use-spring-framework.md
new file mode 100644
index 00000000..3da24d89
--- /dev/null
+++ b/docs/decisions/0001-use-spring-framework.md
@@ -0,0 +1,36 @@
+# Use Spring Framework to Improve Maintainability
+
+* Status: accepted
+* Deciders: Sergio Sacristán
+* Date: 2021-12-22
+
+## Context and Problem Statement
+
+In order to evolve DocGen service and migrate here the LevaDoc feature, initially implemented
+in the SharedLib, we need to improve LevaDoc architecture to make it more maintainable.
+
+## Decision Drivers
+
+* Maintainability: speed up the development with better modularization of the code
+* Testability.
+* Extensibility.
+* Performance (of course we should take care of Performance, but as the API will be executed from a batch,
+we don't care if the response takes 2 seconds more or less)
+
+## Considered Options
+
+* jooby with Dagger and Groovy: poor documentation and examples
+* jooby with SpringFramework and Groovy: poor documentation and examples
+* SpringFramework and Groovy
+
+## Decision Outcome
+
+Chosen option: "SpringFramework", because:
+- It has the best integration with more frameworks, means also better extensibility
+- There's a lot of documentation and examples. Easy to solve problems
+- There's a bigger community of users: easy to involve new developers
+
+### Negative Consequences
+
+* We should do a big refactor
+
diff --git a/docs/decisions/0002-testing-strategy.md b/docs/decisions/0002-testing-strategy.md
new file mode 100644
index 00000000..8b18d4bc
--- /dev/null
+++ b/docs/decisions/0002-testing-strategy.md
@@ -0,0 +1,74 @@
+# Automated testing strategy
+
+* Status: accepted
+* Deciders: Sergio Sacristán
+* Date: 2021-12-22
+
+## Context and Problem Statement
+
+When unit testing a service, the standard unit is usually the service class, simple as that. The test will mock out the layer underneath in this case the DAO/DAL layer and verify the interactions on it. Exact same thing for the DAO layer mocking out the interactions with the database (HibernateTemplate in this example) and verifying the interactions with that.
+
+This is a valid approach, but it leads to brittle tests: adding or removing a layer almost always means rewriting the tests entirely. This happens because the tests rely on the exact structure of the layers, and a change to that means a change to the tests.
+To avoid this kind of inflexibility, we can grow the scope of the unit test by changing the definition of the unit: we can look at a persistent operation as a unit, from the Service Layer through the DAO and all the way down to the raw persistence, whatever that is. Now, the unit test will consume the API of the Service Layer and will have the raw persistence mocked out — in this case, the "templates.repository".
+
+## Decision Drivers
+
+* Optimize testing effort
+* Improve test quality
+
+## Decision Outcome
+
+https://github.com/portainer/portainer
+https://localhost:9443/
+admin 12345678
+
+### Positive Consequences
+
+* {e.g., improvement of quality attribute satisfaction, follow-up decisions required, …}
+* …
+
+### Negative Consequences
+
+* {e.g., compromising quality attribute, follow-up decisions required, …}
+* …
+
+## Pros and Cons of the Options
+
+### {option 1}
+
+{example | description | pointer to more information | …}
+
+* Good, because {argument a}
+* Good, because {argument b}
+* Bad, because {argument c}
+* …
+
+### {option 2}
+
+{example | description | pointer to more information | …}
+
+* Good, because {argument a}
+* Good, because {argument b}
+* Bad, because {argument c}
+* …
+
+### {option 3}
+
+{example | description | pointer to more information | …}
+
+* Good, because {argument a}
+* Good, because {argument b}
+* Bad, because {argument c}
+* …
+
+## Links
+
+* {Link type} {Link to ADR}
+* …
+
+
+
+
+
+
+
diff --git a/docs/decisions/adr-template.md b/docs/decisions/adr-template.md
new file mode 100644
index 00000000..2b3a5497
--- /dev/null
+++ b/docs/decisions/adr-template.md
@@ -0,0 +1,74 @@
+# {short title of solved problem and solution}
+
+* Status: {proposed | rejected | accepted | deprecated | … | superseded by [ADR-0005](0005-example.md)}
+* Deciders: {list everyone involved in the decision}
+* Date: {YYYY-MM-DD when the decision was last updated}
+
+Technical Story: {description | ticket/issue URL}
+
+## Context and Problem Statement
+
+{Describe the context and problem statement, e.g., in free form using two to three sentences. You may want to articulate the problem in form of a question.}
+
+## Decision Drivers
+
+* {driver 1, e.g., a force, facing concern, …}
+* {driver 2, e.g., a force, facing concern, …}
+* …
+
+## Considered Options
+
+* {option 1}
+* {option 2}
+* {option 3}
+* …
+
+## Decision Outcome
+
+Chosen option: "{option 1}", because {justification. e.g., only option, which meets k.o. criterion decision driver | which resolves force {force} | … | comes out best (see below)}.
+
+### Positive Consequences
+
+* {e.g., improvement of quality attribute satisfaction, follow-up decisions required, …}
+* …
+
+### Negative Consequences
+
+* {e.g., compromising quality attribute, follow-up decisions required, …}
+* …
+
+## Pros and Cons of the Options
+
+### {option 1}
+
+{example | description | pointer to more information | …}
+
+* Good, because {argument a}
+* Good, because {argument b}
+* Bad, because {argument c}
+* …
+
+### {option 2}
+
+{example | description | pointer to more information | …}
+
+* Good, because {argument a}
+* Good, because {argument b}
+* Bad, because {argument c}
+* …
+
+### {option 3}
+
+{example | description | pointer to more information | …}
+
+* Good, because {argument a}
+* Good, because {argument b}
+* Bad, because {argument c}
+* …
+
+## Links
+
+* {Link type} {Link to ADR}
+* …
+
+
\ No newline at end of file
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index f4d7b2bf..00e33ede 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
diff --git a/src/gatling/java/org/ods/doc/gen/GatlingRunner.java b/src/gatling/java/org/ods/doc/gen/GatlingRunner.java
new file mode 100644
index 00000000..50ffab0e
--- /dev/null
+++ b/src/gatling/java/org/ods/doc/gen/GatlingRunner.java
@@ -0,0 +1,13 @@
+package org.ods.doc.gen;
+
+import io.gatling.app.Gatling;
+import io.gatling.core.config.GatlingPropertiesBuilder;
+
+public class GatlingRunner {
+ public static void main(String[] args) {
+ GatlingPropertiesBuilder props = new GatlingPropertiesBuilder();
+ props.simulationClass(LoadSimulation.class.getName());
+ props.resultsDirectory("build/reports/gatling");
+ Gatling.fromMap(props.build());
+ }
+}
diff --git a/src/gatling/java/org/ods/doc/gen/LoadSimulation.java b/src/gatling/java/org/ods/doc/gen/LoadSimulation.java
new file mode 100644
index 00000000..0c57c294
--- /dev/null
+++ b/src/gatling/java/org/ods/doc/gen/LoadSimulation.java
@@ -0,0 +1,56 @@
+package org.ods.doc.gen;
+
+import io.gatling.javaapi.core.ScenarioBuilder;
+import io.gatling.javaapi.core.Simulation;
+import io.restassured.http.ContentType;
+import org.awaitility.Awaitility;
+
+import java.util.concurrent.TimeUnit;
+
+import static io.gatling.javaapi.core.CoreDsl.atOnceUsers;
+import static io.gatling.javaapi.core.CoreDsl.exec;
+import static io.gatling.javaapi.core.CoreDsl.global;
+import static io.gatling.javaapi.core.CoreDsl.scenario;
+import static io.gatling.javaapi.http.HttpDsl.http;
+import static io.gatling.javaapi.http.HttpDsl.status;
+import static io.restassured.RestAssured.given;
+
+public class LoadSimulation extends Simulation {
+
+ public static final String HEALTH = "http://localhost:1111/health";
+ // TODO -> change POST_PDF by "http://localhost:8080/document" and test a big doc
+ public static final String POST_PDF = "http://localhost:1111/health";
+ public static final int STATUS_CODE_OK = 200;
+
+ public static final int EXECUTION_TIMES = 100;
+
+ // Assertions: https://gatling.io/docs/gatling/reference/current/core/assertions/
+ public static final double SUCCESSFUL_REQUEST_PERCENT = 100.0;
+ public static final int MAX_RESPONSE_TIME = 400;
+ public static final int MEAN_RESPONSE_TIME = 300;
+
+ ScenarioBuilder scn = scenario( "postPDF").repeat(EXECUTION_TIMES).on(
+ exec(
+ http("POST_PDF")
+ .get(POST_PDF)
+ .asJson()
+ .check(status().is(STATUS_CODE_OK))
+ ).pause(1)
+ );
+
+ {
+ waitUntilDocGenIsUp();
+ setUp(scn.injectOpen(atOnceUsers(1))).assertions(
+ global().successfulRequests().percent().is(SUCCESSFUL_REQUEST_PERCENT),
+ global().responseTime().max().lt(MAX_RESPONSE_TIME),
+ global().responseTime().mean().lt(MEAN_RESPONSE_TIME)
+ );;
+ }
+
+ private void waitUntilDocGenIsUp() {
+ Awaitility.await().atMost(60, TimeUnit.SECONDS).pollInterval(5, TimeUnit.SECONDS).until(() ->
+ {
+ return given().contentType(ContentType.JSON).when().get(HEALTH).getStatusCode() == STATUS_CODE_OK;
+ });
+ }
+}
\ No newline at end of file
diff --git a/src/main/groovy/app/App.groovy b/src/main/groovy/app/App.groovy
deleted file mode 100644
index aee17004..00000000
--- a/src/main/groovy/app/App.groovy
+++ /dev/null
@@ -1,127 +0,0 @@
-package app
-
-import com.typesafe.config.ConfigFactory
-import groovy.util.logging.Slf4j
-import org.jooby.Jooby
-import org.jooby.MediaType
-import org.jooby.json.Jackson
-import util.DocUtils
-import util.FileTools
-
-import java.nio.file.Files
-import java.nio.file.StandardOpenOption
-
-import static groovy.json.JsonOutput.prettyPrint
-import static groovy.json.JsonOutput.toJson
-import static org.jooby.JoobyExtension.get
-import static org.jooby.JoobyExtension.post
-
-@Slf4j
-class App extends Jooby {
-
- {
-
- use(new Jackson())
- use(new DocGen())
-
- post(this, "/document", { req, rsp ->
-
- Map body = req.body().to(HashMap.class)
-
- validateRequestParams(body)
-
- if (log.isDebugEnabled()) {
- log.debug("Input request body data before send it to convert it to a pdf: ")
- log.debug(prettyPrint(toJson(body.data)))
- }
-
- FileTools.newTempFile('document', '.b64') { dataFile ->
- new DocGen().generate(body.metadata.type, body.metadata.version, body.data).withFile { pdf ->
- body = null // Not used anymore. Let it be garbage-collected.
- dataFile.withOutputStream { os ->
- Base64.getEncoder().wrap(os).withStream { encOs ->
- Files.copy(pdf, encOs)
- }
- }
- }
- def dataLength = Files.size(dataFile)
- rsp.length(dataLength + RES_PREFIX.length + RES_SUFFIX.length)
- rsp.type(MediaType.json)
- def prefixIs = new ByteArrayInputStream(RES_PREFIX)
- def suffixIs = new ByteArrayInputStream(RES_SUFFIX)
- Files.newInputStream(dataFile, StandardOpenOption.DELETE_ON_CLOSE).initResource { dataIs ->
- // Jooby is asynchronous. Upon return of the send method, the response has not necessarily
- // been sent. For this reason, we rely on Jooby to close the InputStream and the temporary file
- // will be deleted on close.
- rsp.send(new SequenceInputStream(Collections.enumeration([prefixIs, dataIs, suffixIs])))
- }
- }
-
- })
- .consumes(MediaType.json)
- .produces(MediaType.json)
-
- get(this, "/health", { req, rsp ->
- def message = null
- def status = "passing"
- def statusCode = 200
-
- try {
- FileTools.withTempFile("document", ".html") { documentHtmlFile ->
- documentHtmlFile << "document"
-
- DocGen.Util.convertHtmlToPDF(documentHtmlFile, null).withFile { pdf ->
- def header = DocUtils.getPDFHeader(pdf)
- if (header != PDF_HEADER) {
- message = "conversion form HTML to PDF failed"
- status = "failing"
- statusCode = 500
- }
- return pdf
- }
-
- }
- } catch (e) {
- message = e.message
- status = "failing"
- statusCode = 500
- }
-
- def result = [
- service: "docgen",
- status: status,
- time: new Date().toString()
- ]
-
- if (message) {
- result.message = message
- }
-
- rsp.status(statusCode).send(result)
- })
- .produces(MediaType.json)
- }
-
- private static final RES_PREFIX = '{"data":"'.getBytes('US-ASCII')
- private static final RES_SUFFIX = '"}'.getBytes('US-ASCII')
- private static final PDF_HEADER = '%PDF-1.4'
-
- private static void validateRequestParams(Map body) {
- if (body?.metadata?.type == null) {
- throw new IllegalArgumentException("missing argument 'metadata.type'")
- }
-
- if (body?.metadata?.version == null) {
- throw new IllegalArgumentException("missing argument 'metadata.version'")
- }
-
- if (body?.data == null) {
- throw new IllegalArgumentException("missing argument 'data'")
- }
- }
-
- static void main(String... args) {
- ConfigFactory.invalidateCaches()
- run(App.class, args)
- }
-}
diff --git a/src/main/groovy/app/BitBucketDocumentTemplatesStore.groovy b/src/main/groovy/app/BitBucketDocumentTemplatesStore.groovy
deleted file mode 100644
index de9728d6..00000000
--- a/src/main/groovy/app/BitBucketDocumentTemplatesStore.groovy
+++ /dev/null
@@ -1,130 +0,0 @@
-package app
-
-import feign.Response
-import feign.codec.ErrorDecoder
-import util.DocUtils
-import com.typesafe.config.Config
-import com.typesafe.config.ConfigFactory
-
-import feign.Feign
-import feign.Headers
-import feign.Param
-import feign.RequestLine
-import feign.auth.BasicAuthRequestInterceptor
-import feign.FeignException
-
-import org.apache.http.client.utils.URIBuilder
-import util.FileTools
-
-import java.nio.file.Files
-import java.nio.file.Path
-import java.nio.file.StandardCopyOption
-
-interface BitBucketDocumentTemplatesStoreHttpAPI {
- @Headers("Accept: application/octet-stream")
- @RequestLine("GET /rest/api/latest/projects/{documentTemplatesProject}/repos/{documentTemplatesRepo}/archive?at=refs/heads/release/v{version}&format=zip")
- Response getTemplatesZipArchiveForVersion(@Param("documentTemplatesProject") String documentTemplatesProject, @Param("documentTemplatesRepo") String documentTemplatesRepo, @Param("version") String version)
-}
-
-class BitBucketDocumentTemplatesStore implements DocumentTemplatesStore {
-
- Config config
-
- // TODO: use dependency injection
- BitBucketDocumentTemplatesStore() {
- this.config = ConfigFactory.load()
- }
-
- // Get document templates of a specific version into a target directory
- Path getTemplatesForVersion(String version, Path targetDir) {
- def uri = getZipArchiveDownloadURI(version)
-
- Feign.Builder builder = Feign.builder()
-
- def bitbucketUserName = System.getenv("BITBUCKET_USERNAME")
- def bitbucketPassword = System.getenv("BITBUCKET_PASSWORD")
- if (bitbucketUserName && bitbucketPassword) {
- builder.requestInterceptor(new BasicAuthRequestInterceptor(
- bitbucketUserName, bitbucketPassword
- ))
- }
-
- BitBucketDocumentTemplatesStoreHttpAPI store = builder.target(
- BitBucketDocumentTemplatesStoreHttpAPI.class,
- uri.getScheme() + "://" + uri.getAuthority()
- )
-
-
- def bitbucketRepo = System.getenv("BITBUCKET_DOCUMENT_TEMPLATES_REPO")
- def bitbucketProject = System.getenv("BITBUCKET_DOCUMENT_TEMPLATES_PROJECT")
- try {
- return store.getTemplatesZipArchiveForVersion(
- bitbucketProject,
- bitbucketRepo,
- version
- ).withCloseable { response ->
- if (response.status() >= 300) {
- def methodKey =
- 'BitBucketDocumentTemplatesStoreHttpAPI#getTemplatesZipArchiveForVersion(String,String,String)'
- throw new ErrorDecoder.Default().decode(methodKey, response)
- }
- return FileTools.withTempFile('tmpl', 'zip') { zipArchive ->
- response.body().withCloseable { body ->
- body.asInputStream().withStream { is ->
- Files.copy(is, zipArchive, StandardCopyOption.REPLACE_EXISTING)
- }
- }
- return DocUtils.extractZipArchive(zipArchive, targetDir)
- }
- }
- } catch (FeignException callException) {
- def baseErrMessage = "Could not get document zip from '${uri}'!"
- def baseRepoErrMessage = "${baseErrMessage}\rIn repository '${bitbucketRepo}' - "
- if (callException instanceof FeignException.BadRequest) {
- throw new RuntimeException ("${baseRepoErrMessage}" +
- "is there a correct release branch configured, called 'release/v${version}'?")
- } else if (callException instanceof FeignException.Unauthorized) {
- def bbUserNameError = bitbucketUserName ?: 'Anyone'
- throw new RuntimeException ("${baseRepoErrMessage}" +
- "does '${bbUserNameError}' have access?")
- } else if (callException instanceof FeignException.NotFound) {
- throw new RuntimeException ("${baseErrMessage}" +
- "\rDoes repository '${bitbucketRepo}' in project: '${bitbucketProject}' exist?")
- } else {
- throw callException
- }
- }
- }
-
- // Get a URI to download document templates of a specific version
- URI getZipArchiveDownloadURI(String version) {
- return new URIBuilder(System.getenv("BITBUCKET_URL"))
- .setPath("/rest/api/latest/projects/${System.getenv('BITBUCKET_DOCUMENT_TEMPLATES_PROJECT')}/repos/${System.getenv('BITBUCKET_DOCUMENT_TEMPLATES_REPO')}/archive")
- .addParameter("at", "refs/heads/release/v${version}")
- .addParameter("format", "zip")
- .build()
- }
-
- boolean isApplicableToSystemConfig ()
- {
- List missingEnvs = [ ]
- if (!System.getenv("BITBUCKET_URL")) {
- missingEnvs << "BITBUCKET_URL"
- }
-
- if (!System.getenv("BITBUCKET_DOCUMENT_TEMPLATES_PROJECT")) {
- missingEnvs << "BITBUCKET_DOCUMENT_TEMPLATES_PROJECT"
- }
-
- if (!System.getenv("BITBUCKET_DOCUMENT_TEMPLATES_REPO")) {
- missingEnvs << "BITBUCKET_DOCUMENT_TEMPLATES_REPO"
- }
-
- if (missingEnvs.size() > 0) {
- println "[ERROR]: Bitbucket adapter not applicable - missing config '${missingEnvs}'"
- return false
- }
-
- return true
- }
-}
diff --git a/src/main/groovy/app/DocGen.groovy b/src/main/groovy/app/DocGen.groovy
deleted file mode 100644
index 849c783d..00000000
--- a/src/main/groovy/app/DocGen.groovy
+++ /dev/null
@@ -1,312 +0,0 @@
-package app
-
-import com.github.benmanes.caffeine.cache.Cache
-import com.github.benmanes.caffeine.cache.Caffeine
-import com.github.jknack.handlebars.Handlebars
-import com.github.jknack.handlebars.io.FileTemplateLoader
-import com.google.inject.Binder
-import com.typesafe.config.Config
-import com.typesafe.config.ConfigFactory
-import org.apache.commons.io.file.PathUtils
-import org.apache.commons.io.output.TeeOutputStream
-import org.apache.pdfbox.io.MemoryUsageSetting
-import org.apache.pdfbox.pdmodel.PDDocument
-import org.apache.pdfbox.pdmodel.PDDocumentNameDestinationDictionary
-import org.apache.pdfbox.pdmodel.PDPage
-import org.apache.pdfbox.pdmodel.common.PDNameTreeNode
-import org.apache.pdfbox.pdmodel.interactive.action.PDActionGoTo
-import org.apache.pdfbox.pdmodel.interactive.documentnavigation.destination.PDPageDestination
-import org.apache.pdfbox.pdmodel.interactive.annotation.PDAnnotationLink
-import org.apache.pdfbox.pdmodel.interactive.documentnavigation.outline.PDOutlineNode
-import util.FileTools
-
-import java.nio.file.Files
-import java.nio.file.Path
-import java.nio.file.Paths
-import java.nio.file.StandardCopyOption
-import java.time.Duration
-
-import org.apache.commons.io.FilenameUtils
-import org.jooby.Env
-import org.jooby.Jooby
-
-class DocGen implements Jooby.Module {
-
- Config config
- Cache templatesCache
-
- // TODO: use dependency injection
- DocGen() {
- this.config = ConfigFactory.load()
-
- this.templatesCache = Caffeine.newBuilder()
- .expireAfterWrite(Duration.ofDays(1))
- .removalListener({ key, graph, cause ->
- def path = getPathForTemplatesVersion(key)
- if (Files.exists(path)) {
- PathUtils.deleteDirectory(path)
- }
- })
- .build()
- }
-
- void configure(Env env, Config config, Binder binder) {
- }
-
- // Get document templates for a specific version
- private Path getTemplates(def version) {
- DocumentTemplatesStore store = new BitBucketDocumentTemplatesStore()
- if (!store.isApplicableToSystemConfig()) {
- store = new GithubDocumentTemplatesStore()
- }
- println ("Using templates @${store.getZipArchiveDownloadURI(version)}")
-
- def path = templatesCache.getIfPresent(version)
- if (path == null) {
- path = store.getTemplatesForVersion(version, getPathForTemplatesVersion(version))
- templatesCache.put(version, path)
- }
-
- return path
- }
-
- // Generate a PDF document for a combination of template type, version and data
- Path generate(String type, String version, Object data) {
- // Copy the templates directory including with any assets into a temporary location
- return FileTools.withTempDir("${type}-v${version}") { tmpDir ->
- PathUtils.copyDirectory(getTemplates(version), tmpDir, StandardCopyOption.REPLACE_EXISTING)
-
- // Get partial templates from the temporary location and manipulate as needed
- def partials = getPartialTemplates(tmpDir, type)
-
- // Transform paths to partial templates to paths to rendered HTML files
- partials = partials.collectEntries { name, path ->
- // Write an .html file next to the .tmpl file containing the executed template
- def htmlFile = Paths.get(FilenameUtils.removeExtension(path.toString()))
- Util.executeTemplate(path, htmlFile, data)
- return [ name, htmlFile ]
- }
-
- // Convert the executed templates into a PDF document
- return Util.convertHtmlToPDF(partials.document, data)
- }
- }
-
- // Read partial templates for a template type and version from the basePath directory
- private static Map getPartialTemplates(Path basePath, String type) {
- def partials = [
- document: Paths.get(basePath.toString(), "templates", "${type}.html.tmpl"),
- header: Paths.get(basePath.toString(), "templates", "header.inc.html.tmpl"),
- footer: Paths.get(basePath.toString(), "templates", "footer.inc.html.tmpl")
- ]
-
- partials.each { name, path ->
- // Check if the partial template exists
- if (!Files.exists(path)) {
- throw new FileNotFoundException("could not find required template part '${name}' at '${path}'")
- }
-
- FileTools.newTempFile("${name}_tmpl") { tmp ->
- path.withReader { reader ->
- tmp.withWriter { writer ->
- reader.eachLine { line ->
- def replaced = line.replaceAll('\t', '')
- writer.write(replaced)
- }
- }
- }
- Files.move(tmp, path, StandardCopyOption.REPLACE_EXISTING)
- }
- }
-
- return partials
- }
-
- // Get a path to a directory holding document templates for a specific version
- private Path getPathForTemplatesVersion(String version) {
- return Paths.get(this.config.getString("application.documents.cache.basePath"), version)
- }
-
- class Util {
- // Execute a document template with the necessary data
- static private void executeTemplate(Path path, Path dest, Object data) {
- // TODO: throw if template variables are not provided
- def loader = new FileTemplateLoader("", "")
- dest.withWriter { writer ->
- new Handlebars(loader)
- .compile(path.toString())
- .apply(data, writer)
- }
- }
-
- // Convert a HTML document, with an optional header and footer, into a PDF
- static Path convertHtmlToPDF(Path documentHtmlFile, Object data) {
- def cmd = ["wkhtmltopdf", "--encoding", "UTF-8", "--no-outline", "--print-media-type"]
- cmd << "--enable-local-file-access"
- cmd.addAll(["-T", "40", "-R", "25", "-B", "25", "-L", "25"])
-
- if (data?.metadata?.header) {
- if (data.metadata.header.size() > 1) {
- cmd.addAll(["--header-center", """${data.metadata.header[0]}
-${data.metadata.header[1]}"""])
- } else {
- cmd.addAll(["--header-center", data.metadata.header[0]])
- }
-
- cmd.addAll(["--header-font-size", "10", "--header-spacing", "10"])
- }
-
- cmd.addAll(["--footer-center", "'Page [page] of [topage]'", "--footer-font-size", "10"])
-
- if (data?.metadata?.orientation) {
- cmd.addAll(["--orientation", data.metadata.orientation])
- }
-
- cmd << documentHtmlFile.toAbsolutePath().toString()
-
- return FileTools.newTempFile("document", ".pdf") { documentPDFFile ->
- cmd << documentPDFFile.toAbsolutePath().toString()
-
- println "[INFO]: executing cmd: ${cmd}"
-
- def result = shell(cmd)
- if (result.rc != 0) {
- println "[ERROR]: ${cmd} has exited with code ${result.rc}"
- println "[ERROR]: ${result.stderr}"
- throw new IllegalStateException(
- "PDF Creation of ${documentHtmlFile} failed!\r:${result.stderr}\r:Error code:${result.rc}")
- }
-
- fixDestinations(documentPDFFile.toFile())
- }
- }
-
- // Execute a command in the shell
- static private Map shell(List cmd) {
-
- def proc = cmd.execute()
- def stderr = null
- def rc = FileTools.withTempFile("shell", ".stderr") { tempFile ->
- tempFile.withOutputStream { tempFileOutputStream ->
- new TeeOutputStream(System.err, tempFileOutputStream).withStream { errOutputStream ->
- proc.waitForProcessOutput(System.out, errOutputStream)
- }
- }
- def exitValue = proc.exitValue()
- if (exitValue) {
- stderr = tempFile.text
- }
- return exitValue
- }
-
- return [
- rc: rc,
- stderr: stderr
- ]
- }
-
- private static final long MAX_MEMORY_TO_FIX_DESTINATIONS = 8192L
-
- /**
- * Fixes malformed PDF documents which use page numbers in local destinations, referencing the same document.
- * Page numbers should be used only for references to external documents.
- * These local destinations must use indirect page object references.
- * Note that these malformed references are not correctly renumbered when merging documents.
- * This method finds these malformed references and replaces the page numbers by the corresponding
- * page object references.
- * If the document is not malformed, this method will leave it unchanged.
- *
- * @param pdf a PDF file.
- */
- private static void fixDestinations(File pdf) {
- def memoryUsageSetting = MemoryUsageSetting.setupMixed(MAX_MEMORY_TO_FIX_DESTINATIONS)
- PDDocument.load(pdf, memoryUsageSetting).withCloseable { doc ->
- fixDestinations(doc)
- doc.save(pdf)
- }
- }
-
- /**
- * Fixes malformed PDF documents which use page numbers in local destinations, referencing the same document.
- * Page numbers should be used only for references to external documents.
- * These local destinations must use indirect page object references.
- * Note that these malformed references are not correctly renumbered when merging documents.
- * This method finds these malformed references and replaces the page numbers by the corresponding
- * page object references.
- * If the document is not malformed, this method will leave it unchanged.
- *
- * @param doc a PDF document.
- */
- private static void fixDestinations(PDDocument doc) {
- def pages = doc.pages as List // Accessing pages by index is slow. This will make it fast.
- fixExplicitDestinations(pages)
- def catalog = doc.documentCatalog
- fixNamedDestinations(catalog, pages)
- fixOutline(catalog, pages)
- }
-
- private static fixExplicitDestinations(pages) {
- pages.each { page ->
- page.getAnnotations { it instanceof PDAnnotationLink }.each { link ->
- fixDestinationOrAction(link, pages)
- }
- }
- }
-
- private static fixNamedDestinations(catalog, pages) {
- fixStringDestinations(catalog.names?.dests, pages)
- fixNameDestinations(catalog.dests, pages)
- }
-
- private static fixOutline(catalog, pages) {
- def outline = catalog.documentOutline
- if (outline != null) {
- fixOutlineNode(outline, pages)
- }
- }
-
- private static fixStringDestinations(PDNameTreeNode node, pages) {
- if (node) {
- node.names?.each { name, dest -> fixDestination(dest, pages) }
- node.kids?.each { fixStringDestinations(it, pages) }
- }
- }
-
- private static fixNameDestinations(PDDocumentNameDestinationDictionary dests, pages) {
- dests?.COSObject?.keySet()*.name.each { name ->
- def dest = dests.getDestination(name)
- if (dest instanceof PDPageDestination) {
- fixDestination(dest, pages)
- }
- }
- }
-
- private static fixOutlineNode(PDOutlineNode node, pages) {
- node.children().each { item ->
- fixDestinationOrAction(item, pages)
- fixOutlineNode(item, pages)
- }
- }
-
- private static fixDestinationOrAction(item, pages) {
- def dest = item.destination
- if (dest == null) {
- def action = item.action
- if (action instanceof PDActionGoTo) {
- dest = action.destination
- }
- }
- if (dest instanceof PDPageDestination) {
- fixDestination(dest, pages)
- }
- }
-
- private static fixDestination(PDPageDestination dest, List pages) {
- def pageNum = dest.pageNumber
- if (pageNum != -1) {
- dest.setPage(pages[pageNum])
- }
- }
-
- }
-}
diff --git a/src/main/groovy/app/DocumentTemplatesStore.groovy b/src/main/groovy/app/DocumentTemplatesStore.groovy
deleted file mode 100644
index 6854ed09..00000000
--- a/src/main/groovy/app/DocumentTemplatesStore.groovy
+++ /dev/null
@@ -1,14 +0,0 @@
-package app
-
-import java.nio.file.Path
-
-interface DocumentTemplatesStore {
-
- // Get document templates of a specific version into a target directory
- Path getTemplatesForVersion(String version, Path targetDir)
-
- // Get a URI to download document templates of a specific version
- URI getZipArchiveDownloadURI(String version)
-
- boolean isApplicableToSystemConfig ()
-}
diff --git a/src/main/groovy/app/GithubDocumentTemplatesStore.groovy b/src/main/groovy/app/GithubDocumentTemplatesStore.groovy
deleted file mode 100644
index 27d1dfc5..00000000
--- a/src/main/groovy/app/GithubDocumentTemplatesStore.groovy
+++ /dev/null
@@ -1,94 +0,0 @@
-package app
-
-import feign.Response
-import feign.codec.ErrorDecoder
-import okhttp3.OkHttpClient
-import org.apache.http.client.utils.URIBuilder
-import com.typesafe.config.Config
-import com.typesafe.config.ConfigFactory
-import feign.Feign
-import feign.Headers
-import feign.Param
-import feign.RequestLine
-import util.DocUtils
-import util.FileTools
-
-import java.nio.file.Files
-import java.nio.file.Path
-import java.nio.file.StandardCopyOption
-
-interface GithubDocumentTemplatesStoreHttpAPI {
- @Headers("Accept: application/octet-stream")
- @RequestLine("GET /opendevstack/ods-document-generation-templates/archive/v{version}.zip")
- Response getTemplatesZipArchiveForVersion(@Param("version") String version)
-}
-
-class GithubDocumentTemplatesStore implements DocumentTemplatesStore {
-
- Config config
-
- // TODO: use dependency injection
- GithubDocumentTemplatesStore() {
- this.config = ConfigFactory.load()
- }
-
- // Get document templates of a specific version into a target directory
- Path getTemplatesForVersion(String version, Path targetDir) {
- def uri = getZipArchiveDownloadURI(version)
- Feign.Builder builder = createBuilder()['builder']
-
- GithubDocumentTemplatesStoreHttpAPI store = builder.target(
- GithubDocumentTemplatesStoreHttpAPI.class,
- uri.getScheme() + "://" + uri.getAuthority()
- )
-
- return store.getTemplatesZipArchiveForVersion(version).withCloseable { response ->
- if (response.status() >= 300) {
- def methodKey =
- 'GithubDocumentTemplatesStoreHttpAPI#getTemplatesZipArchiveForVersion(String)'
- throw new ErrorDecoder.Default().decode(methodKey, response)
- }
- return FileTools.withTempFile('tmpl', 'zip') { zipArchive ->
- response.body().withCloseable { body ->
- body.asInputStream().withStream { is ->
- Files.copy(is, zipArchive, StandardCopyOption.REPLACE_EXISTING)
- }
- }
- return DocUtils.extractZipArchive(
- zipArchive, targetDir, "ods-document-generation-templates-${version}")
- }
- }
- }
-
- // Get a URI to download document templates of a specific version
- URI getZipArchiveDownloadURI(String version) {
- // for testing
- String githubUrl = System.getenv("GITHUB_HOST") ?: "https://www.github.com"
- return new URIBuilder(githubUrl)
- .setPath("/opendevstack/ods-document-generation-templates/archive/v${version}.zip")
- .build()
- }
-
- // proxy setup, we return a map for testing
- Map createBuilder () {
- String[] httpProxyHost = System.getenv('HTTP_PROXY')?.trim()?.replace('http://','')?.split(':')
- println ("Proxy setup: ${httpProxyHost ?: 'not found' }")
- if (httpProxyHost && !System.getenv("GITHUB_HOST")) {
- int httpProxyPort = httpProxyHost.size() == 2 ? Integer.parseInt(httpProxyHost[1]) : 80
- Proxy proxy = new Proxy(Proxy.Type.HTTP,
- new InetSocketAddress(httpProxyHost[0], httpProxyPort))
- OkHttpClient okHttpClient = new OkHttpClient().newBuilder().proxy(proxy).build()
- return [
- 'builder': Feign.builder().client(new feign.okhttp.OkHttpClient(okHttpClient)),
- 'proxy' : proxy
- ]
- } else {
- return ['builder' : Feign.builder()]
- }
- }
-
- boolean isApplicableToSystemConfig ()
- {
- return true
- }
-}
diff --git a/src/main/groovy/org/jooby/JoobyExtension.groovy b/src/main/groovy/org/jooby/JoobyExtension.groovy
deleted file mode 100644
index c5d24be5..00000000
--- a/src/main/groovy/org/jooby/JoobyExtension.groovy
+++ /dev/null
@@ -1,32 +0,0 @@
-package org.jooby
-
-import org.jooby.Jooby
-import org.jooby.Route
-
-/** Example on how to hack Groovy so we can use groovy closure on script routes. */
-class JoobyExtension {
-
- private static Route.Filter toHandler(Closure closure) {
- if (closure.maximumNumberOfParameters == 0) {
- Route.ZeroArgHandler handler = { closure() }
- return handler
- } else if (closure.maximumNumberOfParameters == 1) {
- Route.OneArgHandler handler = { req -> closure(req) }
- return handler
- } else if (closure.maximumNumberOfParameters == 2) {
- Route.Handler handler = { req, rsp -> closure(req, rsp) }
- return handler
- }
-
- Route.Filter handler = { req, rsp, chain -> closure(req, rsp, chain) }
- return handler
- }
-
- static Route.Definition get(Jooby self, String pattern, Closure closure) {
- return self.get(pattern, toHandler(closure));
- }
-
- static Route.Definition post(Jooby self, String pattern, Closure closure) {
- return self.post(pattern, toHandler(closure));
- }
-}
diff --git a/src/main/groovy/org/ods/doc/gen/App.groovy b/src/main/groovy/org/ods/doc/gen/App.groovy
new file mode 100644
index 00000000..5db372bb
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/App.groovy
@@ -0,0 +1,13 @@
+package org.ods.doc.gen
+
+import org.springframework.boot.SpringApplication
+import org.springframework.boot.autoconfigure.SpringBootApplication
+
+@SpringBootApplication
+class App {
+
+ static void main(String... args) {
+ SpringApplication.run(App.class, args)
+ }
+
+}
diff --git a/src/main/groovy/org/ods/doc/gen/AppConfiguration.groovy b/src/main/groovy/org/ods/doc/gen/AppConfiguration.groovy
new file mode 100644
index 00000000..631f5f0d
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/AppConfiguration.groovy
@@ -0,0 +1,64 @@
+package org.ods.doc.gen
+
+import com.github.benmanes.caffeine.cache.Caffeine
+import org.apache.commons.io.FileUtils
+import org.springframework.beans.factory.annotation.Value
+import org.springframework.cache.annotation.EnableCaching
+import org.springframework.cache.caffeine.CaffeineCache
+import org.springframework.cloud.openfeign.EnableFeignClients
+import org.springframework.context.annotation.Bean
+import org.springframework.context.annotation.Configuration
+
+import java.nio.file.Paths
+import java.time.Clock
+import java.time.Duration
+
+@EnableCaching
+@EnableFeignClients
+@Configuration
+class AppConfiguration {
+
+ private static final String TEMPLATES = "templates"
+ private static final String TEMPORAL_FOLDER = "temporalFolder"
+ private static final String PROJECT_DATA = "projectData"
+ public static final int DAYS_IN_CACHE = 1
+
+ @Bean
+ Clock aClockBeanToMockInTesting() {
+ return Clock.systemDefaultZone()
+ }
+
+ @Bean
+ CaffeineCache caffeineTemplatesFolder(@Value('${cache.documents.basePath}') String basePath) {
+ FileUtils.deleteDirectory(Paths.get(basePath).toFile())
+ return new CaffeineCache(
+ TEMPLATES,
+ Caffeine.newBuilder()
+ .expireAfterWrite(Duration.ofDays(DAYS_IN_CACHE))
+ .removalListener({ version, graph, cause ->
+ FileUtils.deleteDirectory(Paths.get(basePath, version as String).toFile())
+ }).build()
+ )
+ }
+
+ @Bean
+ CaffeineCache caffeineTemporalFolder() {
+ return new CaffeineCache(
+ TEMPORAL_FOLDER,
+ Caffeine.newBuilder()
+ .expireAfterWrite(Duration.ofDays(DAYS_IN_CACHE))
+ .removalListener({ id, graph, cause ->
+ FileUtils.deleteDirectory(Paths.get(id as String).toFile())
+ }).build()
+ )
+ }
+
+ @Bean
+ CaffeineCache caffeineProjectDataConfig(@Value('${cache.projectData.expiration.minutes}') Long expirationMinutes) {
+ return new CaffeineCache(
+ PROJECT_DATA,
+ Caffeine.newBuilder().expireAfterWrite(Duration.ofMinutes(expirationMinutes)).build()
+ )
+ }
+
+}
diff --git a/src/main/groovy/org/ods/doc/gen/BitBucketClientConfig.groovy b/src/main/groovy/org/ods/doc/gen/BitBucketClientConfig.groovy
new file mode 100644
index 00000000..7ea42bcb
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/BitBucketClientConfig.groovy
@@ -0,0 +1,42 @@
+package org.ods.doc.gen
+
+import feign.Feign
+import feign.Logger
+import feign.auth.BasicAuthRequestInterceptor
+import feign.slf4j.Slf4jLogger
+import groovy.util.logging.Slf4j
+import okhttp3.OkHttpClient
+import org.apache.http.client.utils.URIBuilder
+import org.ods.doc.gen.adapters.git.BitBucketRepository
+import org.springframework.beans.factory.annotation.Value
+import org.springframework.stereotype.Service
+
+@Slf4j
+@Service
+class BitBucketClientConfig {
+
+ final String username
+ private final String password
+ String url
+
+ BitBucketClientConfig(@Value('${bitbucket.username}') String username,
+ @Value('${bitbucket.password}') String password,
+ @Value('${bitbucket.url}') String url){
+ log.info("BitBucketClientConfig - url:[${url}], username:[${username}]")
+
+ this.password = password
+ this.username = username
+ this.url = url
+ }
+
+ BitBucketRepository getClient() {
+ URI baseUrl = new URIBuilder(url).build()
+ Feign.Builder builder = Feign.builder()
+ builder.requestInterceptor(new BasicAuthRequestInterceptor(username, password))
+ feign.okhttp.OkHttpClient client = new feign.okhttp.OkHttpClient(new OkHttpClient().newBuilder().build())
+ return builder.client(client).logger(new Slf4jLogger(BitBucketRepository.class))
+ .logLevel(Logger.Level.BASIC)
+ .target(BitBucketRepository.class, baseUrl.getScheme() + "://" + baseUrl.getAuthority())
+ }
+
+}
diff --git a/src/main/groovy/org/ods/doc/gen/GithubClientConfig.groovy b/src/main/groovy/org/ods/doc/gen/GithubClientConfig.groovy
new file mode 100644
index 00000000..0b6260a9
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/GithubClientConfig.groovy
@@ -0,0 +1,42 @@
+package org.ods.doc.gen
+
+import feign.Feign
+import feign.Logger
+import feign.slf4j.Slf4jLogger
+import groovy.util.logging.Slf4j
+import okhttp3.OkHttpClient
+import org.ods.doc.gen.adapters.git.GitHubRepository
+import org.springframework.beans.factory.annotation.Value
+import org.springframework.stereotype.Service
+
+@Slf4j
+@Service
+class GithubClientConfig {
+
+ String url
+ String[] httpProxyHost
+
+ GithubClientConfig(@Value('${github.url}') String url){
+ log.info("GithubClientConfig - url:[${url}]")
+
+ this.url = url
+ httpProxyHost = System.getenv('HTTP_PROXY')?.trim()?.replace('http://', '')?.split(':')
+ }
+
+ GitHubRepository getClient() {
+ feign.okhttp.OkHttpClient client
+ if (httpProxyHost) {
+ log.info ("Proxy setup: ${httpProxyHost}")
+ int httpProxyPort = httpProxyHost.size() == 2 ? Integer.parseInt(httpProxyHost[1]) : 80
+ Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(httpProxyHost[0], httpProxyPort))
+ client = new feign.okhttp.OkHttpClient(new OkHttpClient().newBuilder().proxy(proxy).build())
+ } else {
+ client = new feign.okhttp.OkHttpClient(new OkHttpClient().newBuilder().build())
+ }
+ URI baseUrl = URI.create(url)
+ return Feign.builder().client(client).logger(new Slf4jLogger(GitHubRepository.class))
+ .logLevel(Logger.Level.BASIC)
+ .target(GitHubRepository.class, baseUrl.getScheme() + "://" + baseUrl.getAuthority())
+ }
+
+}
diff --git a/src/main/groovy/org/ods/doc/gen/SpringContext.groovy b/src/main/groovy/org/ods/doc/gen/SpringContext.groovy
new file mode 100644
index 00000000..8ca4acc2
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/SpringContext.groovy
@@ -0,0 +1,27 @@
+package org.ods.doc.gen
+
+import org.springframework.beans.BeansException
+import org.springframework.context.ApplicationContext
+import org.springframework.context.ApplicationContextAware
+import org.springframework.stereotype.Component
+
+@Component
+class SpringContext implements ApplicationContextAware {
+
+ private static ApplicationContext context;
+
+ /**
+ * Returns the Spring managed bean instance of the given class type if it exists.
+ * Returns null otherwise.
+ * @param beanClass
+ * @return
+ */
+ static <T> T getBean(Class<T> beanClass) {
+ return context.getBean(beanClass);
+ }
+
+ @Override
+ void setApplicationContext(ApplicationContext context) throws BeansException {
+ SpringContext.context = context;
+ }
+}
\ No newline at end of file
diff --git a/src/main/groovy/org/ods/doc/gen/StartupApplicationListener.groovy b/src/main/groovy/org/ods/doc/gen/StartupApplicationListener.groovy
new file mode 100644
index 00000000..cae42e09
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/StartupApplicationListener.groovy
@@ -0,0 +1,24 @@
+package org.ods.doc.gen
+
+import groovy.util.logging.Slf4j
+import kong.unirest.Unirest
+import org.springframework.context.ApplicationListener
+import org.springframework.context.event.ContextRefreshedEvent
+import org.springframework.stereotype.Component
+
+import javax.annotation.PostConstruct
+
+@Slf4j
+@Component
+class StartupApplicationListener implements ApplicationListener<ContextRefreshedEvent> {
+
+ @Override void onApplicationEvent(ContextRefreshedEvent event) {
+ log.trace(event.toString())
+ }
+
+ @PostConstruct
+ void init() {
+ Unirest.config().reset().socketTimeout(6000000).connectTimeout(600000).verifySsl(false)
+ }
+
+}
\ No newline at end of file
diff --git a/src/main/groovy/org/ods/doc/gen/adapters/git/BitBucketRepository.groovy b/src/main/groovy/org/ods/doc/gen/adapters/git/BitBucketRepository.groovy
new file mode 100644
index 00000000..bbe2f942
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/adapters/git/BitBucketRepository.groovy
@@ -0,0 +1,31 @@
+package org.ods.doc.gen.adapters.git
+
+import feign.Headers
+import feign.Param
+import feign.RequestLine
+import feign.Response
+
+interface BitBucketRepository {
+
+ // TODO by config limit=1
+ static final int PAGE_LIMIT = 10
+
+ @Headers("Accept: application/json")
+ @RequestLine("GET /rest/api/latest/projects/{project}/repos/{repo}/commits?limit=10&start={start}")
+ String getCommitsForDefaultBranch(@Param("project") String project,
+ @Param("repo") String repo,
+ @Param("start") int start)
+
+ @Headers("Accept: application/json")
+ @RequestLine("GET /rest/api/latest/projects/{project}/repos/{repo}/commits/{commit}/pull-requests")
+ String getPRforMergedCommit(@Param("project") String project,
+ @Param("repo") String repo,
+ @Param("commit") String commit)
+
+ @Headers("Accept: application/octet-stream")
+ @RequestLine("GET /rest/api/latest/projects/{project}/repos/{repo}/archive?at={branch}&format=zip")
+ Response getRepoZipArchive(@Param("project") String project,
+ @Param("repo") String repo,
+ @Param("branch") String branch)
+
+}
\ No newline at end of file
diff --git a/src/main/groovy/org/ods/doc/gen/adapters/git/BitbucketService.groovy b/src/main/groovy/org/ods/doc/gen/adapters/git/BitbucketService.groovy
new file mode 100644
index 00000000..dc0ee92e
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/adapters/git/BitbucketService.groovy
@@ -0,0 +1,113 @@
+package org.ods.doc.gen.adapters.git
+
+import feign.FeignException
+import feign.Response
+import feign.codec.ErrorDecoder
+import groovy.json.JsonSlurperClassic
+import groovy.util.logging.Slf4j
+import org.ods.doc.gen.BitBucketClientConfig
+import org.ods.doc.gen.core.ZipFacade
+import org.ods.doc.gen.project.data.ProjectData
+import org.springframework.stereotype.Service
+
+import javax.inject.Inject
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.Paths
+import java.nio.file.StandardCopyOption
+
+@Slf4j
+@Service
+class BitbucketService {
+
+ private static final String MAIN_BRANCH = "master"
+
+ private ZipFacade zipFacade
+ private final BitBucketClientConfig bitBucketClientConfig
+
+ @Inject
+ BitbucketService(BitBucketClientConfig bitBucketClientConfig,
+ ZipFacade zipFacade) {
+ this.bitBucketClientConfig = bitBucketClientConfig
+ this.zipFacade = zipFacade
+ }
+
+ Map getCommitsForIntegrationBranch(String repo, ProjectData projectData, int nextPageStart){
+ String projectKey = projectData.getKey()
+ String response = bitBucketClientConfig.getClient().getCommitsForDefaultBranch(projectKey, repo, nextPageStart)
+ return new JsonSlurperClassic().parseText(response)
+ }
+
+ Map getPRforMergedCommit(String repo, ProjectData projectData, String commit) {
+ String projectKey = projectData.getKey()
+ String response = bitBucketClientConfig.getClient().getPRforMergedCommit(projectKey, repo, commit)
+ return new JsonSlurperClassic().parseText(response)
+ }
+
+ String buildReleaseManagerUrl(String projectId, String releaseManagerRepo) {
+ URI uri = new URI([getBitbucketURLForDocs(), projectId, releaseManagerRepo].join("/"))
+ return "${uri.normalize().toString()}.git"
+ }
+
+ String getBitbucketURLForDocs() {
+ return bitBucketClientConfig.url
+ }
+
+ void downloadRepo(String project, String repo, String branch, String tmpFolder) {
+ log.info("downloadRepo: project:${project}, repo:${repo} and branch:${branch}")
+ Path zipArchive = Files.createTempFile("archive-", ".zip")
+ try {
+ downloadRepoWithFallBack(project, repo, branch, zipArchive)
+ zipFacade.extractZipArchive(zipArchive, Paths.get(tmpFolder))
+ } catch (FeignException callException) {
+ checkError(repo, branch, callException)
+ } finally {
+ Files.delete(zipArchive)
+ }
+ }
+
+ protected void downloadRepoWithFallBack(String project, String repo, String branch, Path zipArchive) {
+ try {
+ bitBucketClientConfig
+ .getClient()
+ .getRepoZipArchive(project, repo, branch)
+ .withCloseable { Response response ->
+ streamResult(response, zipArchive)
+ }
+ } catch (Exception callException) {
+ log.warn("Branch [${branch}] doesn't exist, using branch: [${MAIN_BRANCH}]")
+ bitBucketClientConfig
+ .getClient()
+ .getRepoZipArchive(project, repo, MAIN_BRANCH)
+ .withCloseable { Response response ->
+ streamResult(response, zipArchive)
+ }
+ }
+
+ }
+
+ protected void streamResult(Response response, Path zipArchive){
+ if (response.status() >= 300) {
+ throw new ErrorDecoder.Default().decode('downloadRepo', response)
+ }
+ response.body().withCloseable { body ->
+ body.asInputStream().withStream { is ->
+ Files.copy(is, zipArchive, StandardCopyOption.REPLACE_EXISTING)
+ }
+ }
+ }
+
+ protected void checkError(repo, String branch, FeignException callException) {
+ def baseErrMessage = "Could not get document zip from '${repo}'!- For version:${branch}"
+ if (callException instanceof FeignException.BadRequest) {
+ throw new RuntimeException("${baseErrMessage} \rIs there a correct release branch configured?", callException)
+ } else if (callException instanceof FeignException.Unauthorized) {
+ throw new RuntimeException("${baseErrMessage} \rDoes '${bitBucketClientConfig.username}' have access?", callException)
+ } else if (callException instanceof FeignException.NotFound) {
+ throw new RuntimeException("${baseErrMessage}", callException)
+ } else {
+ throw callException
+ }
+ }
+
+}
diff --git a/src/main/groovy/org/ods/doc/gen/adapters/git/GitHubRepository.groovy b/src/main/groovy/org/ods/doc/gen/adapters/git/GitHubRepository.groovy
new file mode 100644
index 00000000..6782bc00
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/adapters/git/GitHubRepository.groovy
@@ -0,0 +1,14 @@
+package org.ods.doc.gen.adapters.git
+
+import feign.Headers
+import feign.Param
+import feign.RequestLine
+import feign.Response
+
+interface GitHubRepository {
+
+ @Headers("Accept: application/octet-stream")
+ @RequestLine("GET /opendevstack/ods-document-generation-templates/archive/v{version}.zip")
+ Response getTemplatesZip(@Param("version") String version)
+
+}
\ No newline at end of file
diff --git a/src/main/groovy/org/ods/doc/gen/adapters/git/GithubService.groovy b/src/main/groovy/org/ods/doc/gen/adapters/git/GithubService.groovy
new file mode 100644
index 00000000..708c124e
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/adapters/git/GithubService.groovy
@@ -0,0 +1,70 @@
+package org.ods.doc.gen.adapters.git
+
+import feign.FeignException
+import feign.Response
+import feign.codec.ErrorDecoder
+import groovy.util.logging.Slf4j
+import org.ods.doc.gen.GithubClientConfig
+import org.ods.doc.gen.core.ZipFacade
+import org.springframework.stereotype.Service
+
+import javax.inject.Inject
+import java.nio.file.Files
+import java.nio.file.Path
+import java.nio.file.StandardCopyOption
+
+@Slf4j
+@Service
+class GithubService {
+
+ private ZipFacade zipFacade
+ private final GithubClientConfig githubClientConfig
+
+ @Inject
+ GithubService(GithubClientConfig githubClientConfig,
+ ZipFacade zipFacade) {
+ this.githubClientConfig = githubClientConfig
+ this.zipFacade = zipFacade
+ }
+
+ void downloadRepo(String version, Path tmpFolder) {
+ Path zipArchive = Files.createTempFile("archive-", ".zip")
+ try {
+ githubClientConfig
+ .getClient()
+ .getTemplatesZip(version)
+ .withCloseable { Response response ->
+ streamResult(response, zipArchive)
+ }
+ zipFacade.extractZipArchive(zipArchive, tmpFolder)
+ } catch (FeignException callException) {
+ checkError(version, callException)
+ } finally {
+ Files.delete(zipArchive)
+ }
+ }
+
+ private void streamResult(Response response, Path zipArchive){
+ if (response.status() >= 300) {
+ throw new ErrorDecoder.Default().decode('downloadTemplates', response)
+ }
+ response.body().withCloseable { body ->
+ body.asInputStream().withStream { is ->
+ Files.copy(is, zipArchive, StandardCopyOption.REPLACE_EXISTING)
+ }
+ }
+ }
+
+ private void checkError(String version, FeignException callException) {
+ def baseErrMessage = "Could not get document zip from GH - For version:${version}"
+ if (callException instanceof FeignException.BadRequest) {
+ throw new RuntimeException("FeignException.BadRequest ${baseErrMessage} \r" +
+ "Is there a correct release branch configured?")
+ } else if (callException instanceof FeignException.NotFound) {
+ throw new RuntimeException("FeignException.NotFound: ${baseErrMessage}")
+ } else {
+ throw callException
+ }
+ }
+
+}
diff --git a/src/main/groovy/org/ods/doc/gen/adapters/jira/CustomIssueFields.groovy b/src/main/groovy/org/ods/doc/gen/adapters/jira/CustomIssueFields.groovy
new file mode 100644
index 00000000..8d641dc2
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/adapters/jira/CustomIssueFields.groovy
@@ -0,0 +1,10 @@
+package org.ods.doc.gen.adapters.jira;
+
+class CustomIssueFields {
+
+ static final String CONTENT = 'EDP Content'
+ static final String HEADING_NUMBER = 'EDP Heading Number'
+ static final String DOCUMENT_VERSION = 'Document Version'
+ static final String RELEASE_VERSION = 'ProductRelease Version'
+
+}
diff --git a/src/main/groovy/org/ods/doc/gen/adapters/jira/IssueTypes.groovy b/src/main/groovy/org/ods/doc/gen/adapters/jira/IssueTypes.groovy
new file mode 100644
index 00000000..8db5bfc6
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/adapters/jira/IssueTypes.groovy
@@ -0,0 +1,9 @@
+package org.ods.doc.gen.adapters.jira;
+
+class IssueTypes {
+
+ static final String DOCUMENTATION_TRACKING = 'Documentation'
+ static final String DOCUMENTATION_CHAPTER = 'Documentation Chapter'
+ static final String RELEASE_STATUS = 'Release Status'
+
+}
diff --git a/src/main/groovy/org/ods/doc/gen/adapters/jira/JiraService.groovy b/src/main/groovy/org/ods/doc/gen/adapters/jira/JiraService.groovy
new file mode 100644
index 00000000..09ae7da6
--- /dev/null
+++ b/src/main/groovy/org/ods/doc/gen/adapters/jira/JiraService.groovy
@@ -0,0 +1,345 @@
+package org.ods.doc.gen.adapters.jira
+
+import groovy.json.JsonOutput
+import groovy.json.JsonSlurperClassic
+import groovy.util.logging.Slf4j
+import kong.unirest.Unirest
+import org.apache.http.client.utils.URIBuilder
+import org.ods.doc.gen.core.StringCleanup
+import org.ods.doc.gen.core.URLHelper
+import org.springframework.beans.factory.annotation.Value
+import org.springframework.stereotype.Service
+
+@SuppressWarnings(['LineLength', 'ParameterName'])
+@Slf4j
+@Service
+class JiraService {
+
+    // Characters replaced in Jira response bodies: non-breaking space -> plain space.
+    protected static Map CHARACTER_REMOVEABLE = ['\u00A0': ' ',]
+
+    URI baseURL   // effective URL used for calls
+    URI targetURL // keeps the configured URL, because baseURL can be modified by Wiremock (in tests)
+    String username
+    String password
+
+    /**
+     * Wires the Jira connection settings from Spring configuration.
+     *
+     * @param baseURL  Jira base URL; a trailing slash is stripped before parsing
+     * @param username Jira user for basic authentication
+     * @param password password for the Jira user (never logged)
+     * @throws IllegalArgumentException when any setting is blank or baseURL
+     *         cannot be parsed as a URI
+     */
+    JiraService(@Value('${jira.url}') String baseURL,
+                @Value('${jira.username}') String username,
+                @Value('${jira.password}') String password) {
+        // Password is deliberately omitted from the log line.
+        log.info("JiraService - url:[${baseURL}], username:[${username}]")
+        // NOTE(review): the literal string "null" is rejected too — presumably an
+        // unresolved Spring property placeholder can arrive that way; confirm.
+        if (!baseURL?.trim() || baseURL=="null") {
+            throw new IllegalArgumentException('Error: unable to connect to Jira. \'baseURL\' is undefined.')
+        }
+
+        if (!username?.trim()) {
+            throw new IllegalArgumentException('Error: unable to connect to Jira. \'username\' is undefined.')
+        }
+
+        if (!password?.trim()) {
+            throw new IllegalArgumentException('Error: unable to connect to Jira. \'password\' is undefined.')
+        }
+
+        // Normalize: drop a single trailing slash before building the URIs.
+        if (baseURL.endsWith('/')) {
+            baseURL = baseURL.substring(0, baseURL.size() - 1)
+        }
+
+        try {
+            // Both start out identical; targetURL keeps the configured host so it
+            // can later be compared against baseURL (see changeRLWhenUsingWiremock).
+            this.baseURL = new URIBuilder(baseURL).build()
+            this.targetURL = new URIBuilder(baseURL).build()
+        } catch (e) {
+            throw new IllegalArgumentException("Error: unable to connect to Jira. '${baseURL}' is not a valid URI.").initCause(e)
+        }
+
+        this.username = username
+        this.password = password
+    }
+
+
+    /**
+     * Adds a comment to the given Jira issue via the REST API v2.
+     *
+     * @param issueIdOrKey id or key of the target issue
+     * @param comment the comment text to append
+     * @throws IllegalArgumentException when either argument is blank
+     * @throws RuntimeException when Jira responds with a failure status
+     */
+    void appendCommentToIssue(String issueIdOrKey, String comment) {
+        if (!issueIdOrKey?.trim()) {
+            throw new IllegalArgumentException('Error: unable to append comment to Jira issue. \'issueIdOrKey\' is undefined.')
+        }
+        if (!comment?.trim()) {
+            throw new IllegalArgumentException('Error: unable to append comment to Jira issue. \'comment\' is undefined.')
+        }
+
+        def payload = JsonOutput.toJson([body: comment])
+        def resp = Unirest.post("${this.baseURL}/rest/api/2/issue/{issueIdOrKey}/comment")
+            .routeParam("issueIdOrKey", issueIdOrKey)
+            .basicAuth(this.username, this.password)
+            .header("Accept", "application/json")
+            .header("Content-Type", "application/json")
+            .body(payload)
+            .asString()
+
+        resp.ifFailure {
+            def message = (resp.getStatus() == 404)
+                ? "Error: unable to append comment to Jira issue. Jira could not be found at: '${this.baseURL}'."
+                : "Error: unable to append comment to Jira issue. Jira responded with code: '${resp.getStatus()}' and message: '${resp.getBody()}'."
+            throw new RuntimeException(message)
+        }
+    }
+
+    /**
+     * Fetches the delta document generation report for a project version from
+     * the Jira docgen plugin endpoint and parses it as JSON (after stripping
+     * non-breaking spaces).
+     *
+     * @param projectKey Jira project key (upper-cased for the request)
+     * @param version the version the report is requested for
+     * @return the parsed report as a Map
+     * @throws IllegalArgumentException when projectKey or version is blank
+     * @throws RuntimeException when Jira responds with a failure status
+     */
+    Map getDeltaDocGenData(String projectKey, String version) {
+        if (!projectKey?.trim()) {
+            throw new IllegalArgumentException('Error: unable to get documentation generation data from Jira. ' +
+                '\'projectKey\' is undefined.')
+        }
+
+        // Guard added for consistency with the projectKey check: a blank version
+        // would otherwise be interpolated into a malformed endpoint URL.
+        if (!version?.trim()) {
+            throw new IllegalArgumentException('Error: unable to get documentation generation data from Jira. ' +
+                '\'version\' is undefined.')
+        }
+
+        def response = Unirest.get("${this.baseURL}/rest/platform/1.1/deltadocgenreports/{projectKey}/{version}")
+            .routeParam("projectKey", projectKey.toUpperCase())
+            .routeParam("version", version)
+            .basicAuth(this.username, this.password)
+            .header("Accept", "application/json")
+            .asString()
+
+        response.ifFailure {
+            def message = 'Error: unable to get documentation generation data. Jira responded with code: ' +
+                "'${response.getStatus()}' and message: '${response.getBody()}'."
+
+            if (response.getStatus() == 404) {
+                message = 'Error: unable to get documentation generation data. ' +
+                    "Jira could not be found at: '${this.baseURL}'."
+            }
+
+            throw new RuntimeException(message)
+        }
+
+        return new JsonSlurperClassic().parseText(StringCleanup.removeCharacters(response.getBody(), CHARACTER_REMOVEABLE))
+    }
+
+
+    /**
+     * Downloads a binary resource from Jira.
+     *
+     * @param url absolute URL of the resource to fetch
+     * @return map with 'contentType' (first Content-Type header value) and
+     *         'data' (raw response bytes)
+     * @throws RuntimeException when Jira responds with a failure status
+     */
+    Map getFileFromJira(String url) {
+        def resp = Unirest.get(url)
+            .basicAuth(this.username, this.password)
+            .asBytes()
+
+        resp.ifFailure {
+            def message = (resp.getStatus() == 404)
+                ? "Error: unable to get file from Jira. Jira could not be found at: '${this.baseURL}'."
+                : "Error: unable to get file from Jira. Jira responded with code: '${resp.getStatus()}' and message: '${resp.getBody()}'."
+            throw new RuntimeException(message)
+        }
+
+        return [
+            contentType: resp.getHeaders()["Content-Type"][0],
+            data: resp.getBody()
+        ]
+    }
+
+    /**
+     * Returns the url unchanged when baseURL still equals the configured
+     * targetURL; otherwise (baseURL was rewritten, e.g. by Wiremock) replaces
+     * the host in url with the current baseURL.
+     */
+    private String changeRLWhenUsingWiremock(String url) {
+        if (baseURL == targetURL) {
+            return url
+        }
+        return URLHelper.replaceHostInUrl(url, baseURL.toString())
+    }
+
+    /**
+     * Convenience wrapper around searchByJQLQuery that returns only the list
+     * of issues from the search result.
+     */
+    List getIssuesForJQLQuery(Map query) {
+        return searchByJQLQuery(query).issues
+    }
+
+
+    /**
+     * Retrieves the create-metadata for a single issue type of a project.
+     *
+     * @param projectKey Jira project key (upper-cased for the request)
+     * @param issueTypeId id of the issue type whose metadata is requested
+     * @return the parsed metadata as a Map
+     * @throws IllegalArgumentException when either argument is blank
+     * @throws RuntimeException when Jira responds with a failure status
+     */
+    Map getIssueTypeMetadata(String projectKey, String issueTypeId) {
+        if (!projectKey?.trim()) {
+            throw new IllegalArgumentException('Error: unable to get Jira issue type metadata. \'projectKey\' is undefined.')
+        }
+        if (!issueTypeId?.trim()) {
+            throw new IllegalArgumentException('Error: unable to get Jira issue type metadata. \'issueTypeId\' is undefined.')
+        }
+
+        def resp = Unirest.get("${this.baseURL}/rest/api/2/issue/createmeta/{projectKey}/issuetypes/{issueTypeId}")
+            .routeParam('projectKey', projectKey.toUpperCase())
+            .routeParam('issueTypeId', issueTypeId)
+            .basicAuth(this.username, this.password)
+            .header('Accept', 'application/json')
+            .asString()
+
+        resp.ifFailure {
+            def message = (resp.getStatus() == 404)
+                ? "Error: unable to get Jira issue type metadata. Jira could not be found at: '${this.baseURL}'."
+                : "Error: unable to get Jira issue type metadata. Jira responded with code: '${resp.getStatus()}' and message: '${resp.getBody()}'."
+            throw new RuntimeException(message)
+        }
+
+        return new JsonSlurperClassic().parseText(resp.getBody())
+    }
+
+
+    /**
+     * Retrieves the issue types available for a project (create-metadata).
+     *
+     * @param projectKey Jira project key (upper-cased for the request)
+     * @return the parsed issue type list as a Map
+     * @throws IllegalArgumentException when projectKey is blank
+     * @throws RuntimeException when Jira responds with a failure status
+     */
+    Map getIssueTypes(String projectKey) {
+        if (!projectKey?.trim()) {
+            throw new IllegalArgumentException('Error: unable to get Jira issue types. \'projectKey\' is undefined.')
+        }
+
+        def resp = Unirest.get("${this.baseURL}/rest/api/2/issue/createmeta/{projectKey}/issuetypes")
+            .routeParam('projectKey', projectKey.toUpperCase())
+            .basicAuth(this.username, this.password)
+            .header('Accept', 'application/json')
+            .asString()
+
+        resp.ifFailure {
+            def message = (resp.getStatus() == 404)
+                ? "Error: unable to get Jira issue types. Jira could not be found at: '${this.baseURL}'."
+                : "Error: unable to get Jira issue types. Jira responded with code: '${resp.getStatus()}' and message: '${resp.getBody()}'."
+            throw new RuntimeException(message)
+        }
+
+        return new JsonSlurperClassic().parseText(resp.getBody())
+    }
+
+
+ List