diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 86e0744..3b91d02 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,16 +12,16 @@ jobs: runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: fetch-depth: 0 ref: ${{ github.event.inputs.branch }} - name: Set up JDK - uses: actions/setup-java@v2 + uses: actions/setup-java@v4 with: - distribution: 'adopt' - java-version: '8' + distribution: 'corretto' + java-version: '21' java-package: jdk server-id: central # Value of the distributionManagement/repository/id field of the pom.xml server-username: SONATYPE_USERNAME # env variable for username in deploy diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d04c3bd..12749f4 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -11,7 +11,7 @@ jobs: name: Package and run all tests runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Init Coveralls @@ -28,10 +28,10 @@ jobs: fi echo "COVERALLS_SKIP=${COVERALLS_SKIP}" >> $GITHUB_ENV - name: Set up JDK - uses: actions/setup-java@v2 + uses: actions/setup-java@v4 with: - distribution: 'adopt' - java-version: 8 + distribution: 'corretto' + java-version: '21' java-package: jdk # this creates a settings.xml with the following server settings-path: ${{ github.workspace }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7a85b0c..e92c0ab 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -13,7 +13,7 @@ jobs: steps: - name: Checkout source code - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: fetch-depth: 0 ref: ${{ github.event.inputs.branch }} @@ -21,10 +21,10 @@ jobs: token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }} - name: Set up JDK - uses: actions/setup-java@v2 + uses: actions/setup-java@v4 with: - distribution: 'adopt' - java-version: '8' + 
distribution: 'corretto' + java-version: '21' java-package: jdk server-id: central # Value of the distributionManagement/repository/id field of the pom.xml server-username: SONATYPE_USERNAME # env variable for username in deploy diff --git a/.gitignore b/.gitignore index 1b13689..2a43afb 100644 --- a/.gitignore +++ b/.gitignore @@ -42,3 +42,4 @@ core/dependency-reduced-pom.xml lib/* *.jar +CLAUDE.md diff --git a/JAVA21_MIGRATION.md b/JAVA21_MIGRATION.md new file mode 100644 index 0000000..d896e40 --- /dev/null +++ b/JAVA21_MIGRATION.md @@ -0,0 +1,277 @@ +# Java 21 Migration — Change Log + +This document describes all changes made to migrate drone-fly from Java 8 to Java 21 +and fix the integration tests that broke as a result of the Hive 4.x upgrade. + +--- + +## Branch + +`Upgrade_Java_21` (personal fork) / `feature/EGDL_7736_Upgrade_Java_21` (upstream) + +--- + +## Commit 1 — `33b6f95` Migrate project to Java 21 + +### Dependencies upgraded + +| Dependency | Before | After | +|---|---|---| +| Java / JDK | 8 | 21 | +| Spring Boot | 2.7.10 | 3.2.12 (Spring Framework 6.1) | +| Hive Metastore | 2.3.9 | 4.0.1 | +| Apiary Extensions (`kafka-metastore-receiver`) | 6.0.2 | 8.1.15 | +| Hadoop client runtime | — | 3.3.6 (added; provides shaded WoodStox XML parser) | +| Guava | 27.1-jre | 33.4.0-jre | +| MSK IAM Auth | 1.1.9 | 2.2.0 | +| Spotless Maven plugin | 2.4.1 | 2.43.0 (google-java-format 1.19.2, Java 21 compatible) | +| JaCoCo | 0.8.6 | 0.8.12 | +| Surefire | 3.0.0-M5 | 3.2.5 | +| Docker base image | `openjdk:8-jdk` | `amazoncorretto:21-al2023` | + +Dropped explicit version pins for Logback, Log4j, JUnit, Mockito, AssertJ, and +Dropwizard — these are now managed by the Spring Boot BOM. 
+ +### Build / tooling changes + +- `pom.xml`: set `jdk.version=21`, `maven.compiler.release=21` +- `.mvn/jvm.config`: added `--add-exports` flags required by google-java-format + on JDK 17+ +- Surefire `argLine`: added `--add-opens` for Hadoop / Hive / Mockito runtime + reflection +- Jib container config: added `--add-opens` JVM flags for Hadoop / Hive runtime +- GitHub Actions workflows (`.github/workflows/*.yml`): upgraded to Java 21, + `corretto` distribution, `actions/checkout@v4`, `actions/setup-java@v4` +- `DataSourceAutoConfiguration` excluded to suppress spurious JDBC auto-config + pulled in by Hive transitive dependencies + +### Source code changes + +| File | Change | +|---|---| +| `DroneFly.java` | No logic change; re-formatted | +| `DroneFlyRunner.java` | `javax.annotation.PreDestroy` → `jakarta.annotation.PreDestroy` | +| `CommonBeans.java` | `javax.annotation.PreDestroy` → `jakarta.annotation.PreDestroy` | +| `HMSHandlerFactory.java` | Import updated: `HiveMetaStore.HMSHandler` → `HMSHandler` (Hive 4.x moved it to a top-level class) | +| `HiveEventConverterService.java` | Updated all event constructors for Hive 4.x API changes (see table below) | +| `ListenerCatalog.java` | `JavaUtils` package rename in Hive 4.x | +| `ListenerCatalogFactory.java` | `commons-lang` → `commons-lang3` (`StringUtils`) | +| `LoggingMetastoreListener.java` | Re-formatted | +| `DroneFlyIntegrationTest.java` | `org.awaitility.Duration` → `java.time.Duration`; import ordering | +| `DroneFlyIntegrationTestUtils.java` | Re-formatted | +| `DummyListener.java` (integration) | Re-formatted | + +### Hive 4.x event constructor changes in `HiveEventConverterService` + +| Event | Hive 3.x constructor | Hive 4.x constructor | +|---|---|---| +| `CreateTableEvent` | `(Table, boolean, HMSHandler)` | `(Table, boolean, IHMSHandler, boolean isReplicated)` | +| `AlterTableEvent` | `(oldTable, newTable, isTruncateOp, status, handler)` | `(oldTable, newTable, isTruncateOp, status, writeId, 
handler, isReplicated)` | +| `AlterPartitionEvent` | shorter signature | `(oldPart, newPart, table, status, isTruncateOp, writeId, handler)` | +| `DropTableEvent` | `(table, status, deleteData, handler)` | `(table, status, deleteData, handler, isReplicated)` | +| `InsertEvent` | shorter signature | `(dbName, catName, tableName, partVals, insertData, status, handler)` | + +--- + +## Commit 2 — `91cdcca` Fix integration tests: add Hive 3.x/4.x compatibility shims + +### Problem + +`apiary-hive-events:8.1.15` (consumed via `kafka-metastore-receiver`) was compiled +against Hive 3.x. At runtime with `hive-metastore:4.0.1` three layers of binary +incompatibility caused the integration test to fail. + +### Layer 1 — `NoSuchMethodError`: missing 3-arg `CreateTableEvent` constructor + +**Root cause:** +`JsonMetaStoreEventSerDe$HeplerApiaryListenerEvent` has a static initializer that +calls `new CreateTableEvent(null, false, (HiveMetaStore.HMSHandler) null)`. +Two things are wrong in Hive 4.x: + +1. `HiveMetaStore.HMSHandler` (inner class) no longer exists — `HMSHandler` is now + a standalone top-level class. +2. The constructor signature changed to + `CreateTableEvent(Table, boolean, IHMSHandler, boolean isReplicated)`. + +**Fix — two shim classes added to `drone-fly-app/src/main/java/`:** + +`org/apache/hadoop/hive/metastore/HiveMetaStore.java` +- Re-introduces `HiveMetaStore` with an inner `HMSHandler` class that extends the + new standalone `org.apache.hadoop.hive.metastore.HMSHandler`. + +`org/apache/hadoop/hive/metastore/events/CreateTableEvent.java` +- Provides both constructors: + - 3-arg (Hive 3.x): `(Table, boolean, HiveMetaStore.HMSHandler)` — satisfies the + apiary static initializer. + - 4-arg (Hive 4.x): `(Table, boolean, IHMSHandler, boolean)` — used by + `HiveEventConverterService`. +- Extends `ListenerEvent` directly and implements `getTable()` / `isReplicated()`. 
+ +Because these classes live in `drone-fly-app/target/classes/`, they appear on the +classpath before the Hive jars and shadow the Hive versions at runtime. + +### Layer 2 — Jackson `InvalidDefinitionException`: conflicting `ByteBuffer` setters + +**Root cause:** +Hive 4.x added an inline `colStats: ColumnStatistics` field to `Table`. When Jackson +builds a deserializer for `Table` (triggered during Kafka event deserialization), it +recursively introspects `ColumnStatistics` → `ColumnStatisticsData` → all 8 column +stats variant types. Each of these Thrift-generated Hive 4.x classes exposes both a +`setX(byte[])` and a `setX(ByteBuffer)` setter for binary properties (`bitVectors`, +`histogram`, `unscaled`). Jackson treats these as conflicting definitions for the same +property and throws `InvalidDefinitionException` before any data is read. + +**Fix — 9 shim classes added to `drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/`:** + +| Shim class | Conflicting fields removed | +|---|---| +| `BooleanColumnStatsData` | `setBitVectors(ByteBuffer)` | +| `LongColumnStatsData` | `setBitVectors(ByteBuffer)`, `setHistogram(ByteBuffer)` | +| `DoubleColumnStatsData` | `setBitVectors(ByteBuffer)`, `setHistogram(ByteBuffer)` | +| `StringColumnStatsData` | `setBitVectors(ByteBuffer)` | +| `BinaryColumnStatsData` | `setBitVectors(ByteBuffer)` | +| `DecimalColumnStatsData` | `setBitVectors(ByteBuffer)`, `setHistogram(ByteBuffer)` | +| `DateColumnStatsData` | `setBitVectors(ByteBuffer)`, `setHistogram(ByteBuffer)` | +| `TimestampColumnStatsData` | `setBitVectors(ByteBuffer)`, `setHistogram(ByteBuffer)` | +| `Decimal` | `setUnscaled(ByteBuffer)` | + +Each shim is a minimal plain Java bean (`Serializable`, no-arg constructor, +`byte[]`-only setters) that shadows the Hive 4.x Thrift-generated class on the +classpath. drone-fly only deserializes these objects from JSON via Jackson; it never +performs Thrift I/O on them, so the omission of Thrift plumbing is safe. 
+ +### Layer 3 — Assertion failure: new Hive 4.x `Partition` fields + +**Root cause:** +Hive 4.x `Partition` gained two new optional fields: `writeId` (default `-1`) and +`isStatsCompliant` (default `false`). These fields are serialized to JSON by +`KafkaMetaStoreEventListener` and deserialized back by drone-fly, setting the Thrift +`isSet` flags to `true`. The expected `Partition` built by `buildPartition()` had the +same values but with `isSet = false`, causing `Partition.equals()` to return `false`. + +**Fix:** +`DroneFlyIntegrationTestUtils.buildPartition()` now explicitly calls +`partition.setWriteId(-1)` and `partition.setIsStatsCompliant(false)` so the expected +object's `isSet` flags match the deserialized object. + +--- + +## Commit 3 — Fix `ClassNotFoundException` for external listener JARs in Jib-built images + +### Problem + +Child images that extend `drone-fly-app` (e.g. `egdp-docker-glue-sync-listener`) download a +listener JAR into `/app/libs` at Dockerfile build time: + +```dockerfile +FROM expediagroup/drone-fly-app:1.0.9-SNAPSHOT +RUN cd /app/libs && curl ... apiary-gluesync-listener-8.1.13-all.jar +``` + +After upgrading to Java 21 / Hive 4.x, the container failed on startup with: + +``` +Caused by: java.lang.ClassNotFoundException: + com.expediagroup.apiary.extensions.gluesync.listener.ApiaryGlueSync + at jdk.internal.loader.ClassLoaders$AppClassLoader.loadClass(...) 
+``` + +### Root cause — two compounding changes + +**Change 1 — Hive 2.x → 4.x changed `JavaUtils.getClassLoader()` behavior** + +| | Hive 2.x | Hive 4.x | +|---|---|---| +| Package | `org.apache.hadoop.hive.common.JavaUtils` | `org.apache.hadoop.hive.metastore.utils.JavaUtils` | +| `getClassLoader()` returns | `Thread.currentThread().getContextClassLoader()` | JVM `AppClassLoader` | + +In a Spring Boot fat-jar launched with `PropertiesLauncher` and `loader.path=lib/`, the thread +context classloader is Spring Boot's `LaunchedURLClassLoader`, which can find JARs placed in +`loader.path`. Hive 2.x `JavaUtils` used this classloader, so external listener JARs were +visible. + +Hive 4.x `JavaUtils` returns the plain JVM `AppClassLoader`, which only sees the `-cp` +argument set at JVM startup — not dynamically placed JARs. + +**Change 2 — Jib bakes the classpath at image-build time** + +Jib generates an `ENTRYPOINT` with an **explicit list** of dependency JARs in the `-cp` +argument (determined at `mvn package` time). JARs downloaded by a child Dockerfile's `RUN` +step land on the filesystem in `/app/libs` but are **never added** to that hardcoded classpath. + +Before the migration this was masked: the Hive 2.x `JavaUtils` used `LaunchedURLClassLoader` +(via PropertiesLauncher), which resolved listener JARs through `loader.path` independently of +the Jib classpath. After the migration both defences were removed simultaneously. + +| | Before migration | After migration | +|---|---|---| +| `JavaUtils.getClassLoader()` returns | `LaunchedURLClassLoader` (sees `loader.path`) | `AppClassLoader` (sees only `-cp`) | +| Listener JAR on classpath? | ✅ Yes (via `loader.path`) | ❌ No (not in Jib `-cp`) | + +### Fix — override `ENTRYPOINT` in child Dockerfile with a wildcard classpath + +`egdp-docker-glue-sync-listener/Dockerfile` was updated to override the Jib-baked entrypoint +with one that uses `/app/libs/*`. 
The JVM expands the wildcard at **startup time**, picking up +every JAR present in `/app/libs/` — including those downloaded by the `RUN curl` step: + +```dockerfile +ENTRYPOINT ["java", + "--add-opens=java.base/java.lang=ALL-UNNAMED", + "--add-opens=java.base/java.lang.reflect=ALL-UNNAMED", + "--add-opens=java.base/java.io=ALL-UNNAMED", + "--add-opens=java.base/java.net=ALL-UNNAMED", + "--add-opens=java.base/java.nio=ALL-UNNAMED", + "--add-opens=java.base/java.util=ALL-UNNAMED", + "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + "--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + "--add-opens=java.base/java.security=ALL-UNNAMED", + "-cp", "/app/resources:/app/classes:/app/libs/*", + "com.expediagroup.dataplatform.dronefly.app.DroneFly"] +``` + +The `--add-opens` flags match those in the Jib `<jvmFlags>` configuration in the parent +`pom.xml` so runtime Hadoop/Hive reflection behaviour is preserved. + +> **Note for other child images:** Any Dockerfile that extends `drone-fly-app` and adds JARs +> to `/app/libs` must include this `ENTRYPOINT` override to ensure those JARs are on the +> classpath. 
+ +--- + +## Files changed (summary) + +``` +.github/workflows/build.yml +.github/workflows/main.yml +.github/workflows/release.yml +.mvn/jvm.config [new] +drone-fly-app/pom.xml +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFly.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunner.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/context/CommonBeans.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/messaging/MessageReaderAdapter.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/DroneFlyNotificationService.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterService.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/ListenerCatalog.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactory.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/ListenerCatalogFactory.java +drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/listener/LoggingMetastoreListener.java +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java [new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java [new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java[new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java [new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/Decimal.java [new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java[new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java [new - compat shim] 
+drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java [new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java [new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/TimestampColumnStatsData.java[new - compat shim] +drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/events/CreateTableEvent.java [new - compat shim] +drone-fly-app/src/test/java/... (unit tests updated for Hive 4.x API) +drone-fly-core/src/main/java/... (re-formatted) +drone-fly-integration-tests/pom.xml +drone-fly-integration-tests/src/test/java/.../DroneFlyIntegrationTest.java +drone-fly-integration-tests/src/test/java/.../DroneFlyIntegrationTestUtils.java +drone-fly-integration-tests/src/test/java/.../DummyListener.java +pom.xml +``` diff --git a/drone-fly-app/pom.xml b/drone-fly-app/pom.xml index 0351076..18cea34 100644 --- a/drone-fly-app/pom.xml +++ b/drone-fly-app/pom.xml @@ -14,7 +14,7 @@ 1.11.532 0.2.5 - 2.3.9 + 4.0.1 8008 @@ -27,7 +27,7 @@ com.expediagroup.apiary kafka-metastore-receiver - 6.0.2 + 8.1.15 jdk.tools @@ -53,6 +53,10 @@ org.apache.geronimo.specs geronimo-jaspic_1.0_spec + + org.apache.hive + hive-metastore + @@ -60,12 +64,35 @@ micrometer-registry-prometheus - org.apache.httpcomponents - httpclient + org.apache.hive + hive-metastore + ${hive.version} + + + junit + junit + + + org.eclipse.jetty.aggregate + jetty-all + + + org.eclipse.jetty.orbit + javax.servlet + + + javax.servlet + servlet-api + + + jdk.tools + jdk.tools + + org.apache.hive - hive-metastore + hive-standalone-metastore-server ${hive.version} @@ -88,6 +115,26 @@ jdk.tools jdk.tools + + org.slf4j + slf4j-log4j12 + + + org.apache.logging.log4j + log4j-slf4j-impl + + + tomcat + jasper-compiler + + + tomcat + jasper-runtime + + + org.apache.hadoop + hadoop-common + @@ -97,7 +144,7 @@ com.google.guava guava - 27.1-jre + 33.4.0-jre software.amazon.msk @@ -107,7 +154,17 @@ 
io.dropwizard.metrics metrics-core - ${dropwizard.version} + + + org.apache.hadoop + hadoop-client-runtime + 3.4.2 + + + org.apache.hadoop + hadoop-mapreduce-client-core + 3.4.2 + test org.springframework diff --git a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFly.java b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFly.java index aa6f172..d217828 100644 --- a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFly.java +++ b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFly.java @@ -19,6 +19,7 @@ import org.springframework.beans.BeansException; import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration; import org.springframework.boot.builder.SpringApplicationBuilder; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.ApplicationContext; @@ -27,7 +28,7 @@ import com.google.common.annotations.VisibleForTesting; -@SpringBootApplication +@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class}) @EnableConfigurationProperties public class DroneFly implements ApplicationContextAware { diff --git a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunner.java b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunner.java index f291c26..e29d48b 100644 --- a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunner.java +++ b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunner.java @@ -18,7 +18,7 @@ import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; -import javax.annotation.PreDestroy; +import jakarta.annotation.PreDestroy; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git 
a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/context/CommonBeans.java b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/context/CommonBeans.java index bc0df3e..620319d 100644 --- a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/context/CommonBeans.java +++ b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/context/CommonBeans.java @@ -78,10 +78,10 @@ public ListenerCatalog listenerCatalog(HiveConf conf) throws MetaException { @Bean public MessageReaderAdapter messageReaderAdapter() { Properties consumerProperties = getConsumerProperties(); - KafkaMessageReader delegate = KafkaMessageReaderBuilder. - builder(bootstrapServers, topicName, instanceName). - withConsumerProperties(consumerProperties). - build(); + KafkaMessageReader delegate = KafkaMessageReaderBuilder + .builder(bootstrapServers, topicName, instanceName) + .withConsumerProperties(consumerProperties) + .build(); return new MessageReaderAdapter(delegate); } @@ -93,4 +93,5 @@ private Properties getConsumerProperties() { }); return consumerProperties; } -} \ No newline at end of file + +} diff --git a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterService.java b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterService.java index 5a9e3c5..26e081e 100644 --- a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterService.java +++ b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterService.java @@ -75,7 +75,7 @@ public ListenerEvent toHiveEvent(ApiaryListenerEvent serializableHiveEvent) case ON_ALTER_PARTITION: { ApiaryAlterPartitionEvent alterPartition = (ApiaryAlterPartitionEvent) serializableHiveEvent; hiveEvent = new AlterPartitionEvent(alterPartition.getOldPartition(), alterPartition.getNewPartition(), - 
alterPartition.getTable(), alterPartition.getStatus(), hmsHandlerFactory.newInstance()); + alterPartition.getTable(), alterPartition.getStatus(), false, null, hmsHandlerFactory.newInstance()); break; } case ON_DROP_PARTITION: { @@ -87,19 +87,19 @@ public ListenerEvent toHiveEvent(ApiaryListenerEvent serializableHiveEvent) case ON_CREATE_TABLE: { ApiaryCreateTableEvent createTableEvent = (ApiaryCreateTableEvent) serializableHiveEvent; hiveEvent = new CreateTableEvent(createTableEvent.getTable(), createTableEvent.getStatus(), - hmsHandlerFactory.newInstance()); + hmsHandlerFactory.newInstance(), false); break; } case ON_ALTER_TABLE: { ApiaryAlterTableEvent alterTableEvent = (ApiaryAlterTableEvent) serializableHiveEvent; hiveEvent = new AlterTableEvent(alterTableEvent.getOldTable(), alterTableEvent.getNewTable(), - alterTableEvent.getStatus(), hmsHandlerFactory.newInstance()); + false, alterTableEvent.getStatus(), null, hmsHandlerFactory.newInstance(), false); break; } case ON_DROP_TABLE: { ApiaryDropTableEvent dropTable = (ApiaryDropTableEvent) serializableHiveEvent; hiveEvent = new DropTableEvent(dropTable.getTable(), dropTable.getStatus(), dropTable.getDeleteData(), - hmsHandlerFactory.newInstance()); + hmsHandlerFactory.newInstance(), false); break; } @@ -116,7 +116,7 @@ public ListenerEvent toHiveEvent(ApiaryListenerEvent serializableHiveEvent) InsertEventRequestData insertEventRequestData = new InsertEventRequestData(insert.getFiles()); insertEventRequestData.setFilesAddedChecksum(insert.getFileChecksums()); - hiveEvent = new InsertEvent(insert.getDatabaseName(), insert.getTableName(), partVals, insertEventRequestData, + hiveEvent = new InsertEvent(insert.getDatabaseName(), null, insert.getTableName(), partVals, insertEventRequestData, insert.getStatus(), hmsHandlerFactory.newInstance()); break; } diff --git a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/ListenerCatalog.java 
b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/ListenerCatalog.java index bf4b3f6..6fbe25d 100644 --- a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/ListenerCatalog.java +++ b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/ListenerCatalog.java @@ -26,7 +26,6 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.common.JavaUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.MetaStoreEventListener; @@ -62,7 +61,7 @@ private List getMetaStoreListeners(Class clazz, HiveConf conf, String for (String listenerImpl : listenerImpls) { try { T listener = (T) Class - .forName(listenerImpl.trim(), true, JavaUtils.getClassLoader()) + .forName(listenerImpl.trim(), true, Thread.currentThread().getContextClassLoader()) .getConstructor(Configuration.class) .newInstance(conf); listeners.add(listener); diff --git a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactory.java b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactory.java index 3687899..f8c9cb9 100644 --- a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactory.java +++ b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactory.java @@ -16,7 +16,7 @@ package com.expediagroup.dataplatform.dronefly.app.service.factory; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.HMSHandler; import org.apache.hadoop.hive.metastore.api.MetaException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -31,6 +31,6 @@ public HMSHandlerFactory(HiveConf hiveConf) { } public HMSHandler newInstance() 
throws MetaException { - return new HMSHandler("drone-fly", hiveConf, false); + return new HMSHandler("drone-fly", hiveConf); } } diff --git a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/ListenerCatalogFactory.java b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/ListenerCatalogFactory.java index bc3e061..4e5da28 100644 --- a/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/ListenerCatalogFactory.java +++ b/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/ListenerCatalogFactory.java @@ -15,7 +15,7 @@ */ package com.expediagroup.dataplatform.dronefly.app.service.factory; -import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hive.conf.HiveConf; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java new file mode 100644 index 0000000..c192232 --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java @@ -0,0 +1,34 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.metastore; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.metastore.api.MetaException; + +/** + * Compatibility shim that re-introduces {@code HiveMetaStore.HMSHandler} as an inner class. In Hive + * 4.x, {@code HMSHandler} became a standalone top-level class and the inner class was removed. + * Libraries compiled against Hive 3.x (e.g. apiary-hive-events <= 8.1.15) still reference {@code + * HiveMetaStore$HMSHandler}, so this shim restores it. + */ +public class HiveMetaStore { + + public static class HMSHandler extends org.apache.hadoop.hive.metastore.HMSHandler { + public HMSHandler(String name, Configuration conf) throws MetaException { + super(name, conf); + } + } +} diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java new file mode 100644 index 0000000..3745899 --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/BinaryColumnStatsData.java @@ -0,0 +1,65 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code BinaryColumnStatsData}. 
The Hive 4.x Thrift-generated class has + * both {@code setBitVectors(byte[])} and {@code setBitVectors(ByteBuffer)}, which causes Jackson + * {@code InvalidDefinitionException}. This shim exposes only the {@code byte[]} setter. + */ +public class BinaryColumnStatsData implements Serializable { + + private long maxColLen; + private double avgColLen; + private long numNulls; + private byte[] bitVectors; + + public BinaryColumnStatsData() {} + + public long getMaxColLen() { + return maxColLen; + } + + public void setMaxColLen(long maxColLen) { + this.maxColLen = maxColLen; + } + + public double getAvgColLen() { + return avgColLen; + } + + public void setAvgColLen(double avgColLen) { + this.avgColLen = avgColLen; + } + + public long getNumNulls() { + return numNulls; + } + + public void setNumNulls(long numNulls) { + this.numNulls = numNulls; + } + + public byte[] getBitVectors() { + return bitVectors; + } + + public void setBitVectors(byte[] bitVectors) { + this.bitVectors = bitVectors; + } +} diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java new file mode 100644 index 0000000..ad4e5d8 --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/BooleanColumnStatsData.java @@ -0,0 +1,66 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code BooleanColumnStatsData}. The Hive 4.x Thrift-generated class has + * both {@code setBitVectors(byte[])} and {@code setBitVectors(ByteBuffer)}, which causes Jackson to + * throw {@code InvalidDefinitionException: Conflicting setter definitions}. This shim exposes only + * the {@code byte[]} setter, resolving the conflict. + */ +public class BooleanColumnStatsData implements Serializable { + + private long numTrues; + private long numFalses; + private long numNulls; + private byte[] bitVectors; + + public BooleanColumnStatsData() {} + + public long getNumTrues() { + return numTrues; + } + + public void setNumTrues(long numTrues) { + this.numTrues = numTrues; + } + + public long getNumFalses() { + return numFalses; + } + + public void setNumFalses(long numFalses) { + this.numFalses = numFalses; + } + + public long getNumNulls() { + return numNulls; + } + + public void setNumNulls(long numNulls) { + this.numNulls = numNulls; + } + + public byte[] getBitVectors() { + return bitVectors; + } + + public void setBitVectors(byte[] bitVectors) { + this.bitVectors = bitVectors; + } +} diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java new file mode 100644 index 0000000..da2096b --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DateColumnStatsData.java @@ -0,0 +1,84 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code DateColumnStatsData}. The Hive 4.x Thrift-generated class has both + * {@code setBitVectors(byte[])} and {@code setBitVectors(ByteBuffer)}, and similarly for {@code + * setHistogram}, which causes Jackson {@code InvalidDefinitionException}. This shim exposes only + * the {@code byte[]} setters, resolving the conflict. + */ +public class DateColumnStatsData implements Serializable { + + private Date lowValue; + private Date highValue; + private long numNulls; + private long numDVs; + private byte[] bitVectors; + private byte[] histogram; + + public DateColumnStatsData() {} + + public Date getLowValue() { + return lowValue; + } + + public void setLowValue(Date lowValue) { + this.lowValue = lowValue; + } + + public Date getHighValue() { + return highValue; + } + + public void setHighValue(Date highValue) { + this.highValue = highValue; + } + + public long getNumNulls() { + return numNulls; + } + + public void setNumNulls(long numNulls) { + this.numNulls = numNulls; + } + + public long getNumDVs() { + return numDVs; + } + + public void setNumDVs(long numDVs) { + this.numDVs = numDVs; + } + + public byte[] getBitVectors() { + return bitVectors; + } + + public void setBitVectors(byte[] bitVectors) { + this.bitVectors = bitVectors; + } + + public byte[] getHistogram() { + return histogram; + } + + public void setHistogram(byte[] histogram) { + this.histogram = histogram; + } +} diff --git 
a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/Decimal.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/Decimal.java new file mode 100644 index 0000000..9ddcb18 --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/Decimal.java @@ -0,0 +1,47 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code Decimal}. The Hive 4.x Thrift-generated class has both {@code + * setUnscaled(byte[])} and {@code setUnscaled(ByteBuffer)}, which causes Jackson {@code + * InvalidDefinitionException}. This shim exposes only the {@code byte[]} setter. 
+ */ +public class Decimal implements Serializable { + + private short scale; + private byte[] unscaled; + + public Decimal() {} + + public short getScale() { + return scale; + } + + public void setScale(short scale) { + this.scale = scale; + } + + public byte[] getUnscaled() { + return unscaled; + } + + public void setUnscaled(byte[] unscaled) { + this.unscaled = unscaled; + } +} diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java new file mode 100644 index 0000000..5715158 --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DecimalColumnStatsData.java @@ -0,0 +1,84 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code DecimalColumnStatsData}. The Hive 4.x Thrift-generated class has + * both {@code setBitVectors(byte[])} and {@code setBitVectors(ByteBuffer)}, and similarly for + * {@code setHistogram}, which causes Jackson {@code InvalidDefinitionException}. This shim exposes + * only the {@code byte[]} setters, resolving the conflict. 
+ */ +public class DecimalColumnStatsData implements Serializable { + + private Decimal lowValue; + private Decimal highValue; + private long numNulls; + private long numDVs; + private byte[] bitVectors; + private byte[] histogram; + + public DecimalColumnStatsData() {} + + public Decimal getLowValue() { + return lowValue; + } + + public void setLowValue(Decimal lowValue) { + this.lowValue = lowValue; + } + + public Decimal getHighValue() { + return highValue; + } + + public void setHighValue(Decimal highValue) { + this.highValue = highValue; + } + + public long getNumNulls() { + return numNulls; + } + + public void setNumNulls(long numNulls) { + this.numNulls = numNulls; + } + + public long getNumDVs() { + return numDVs; + } + + public void setNumDVs(long numDVs) { + this.numDVs = numDVs; + } + + public byte[] getBitVectors() { + return bitVectors; + } + + public void setBitVectors(byte[] bitVectors) { + this.bitVectors = bitVectors; + } + + public byte[] getHistogram() { + return histogram; + } + + public void setHistogram(byte[] histogram) { + this.histogram = histogram; + } +} diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java new file mode 100644 index 0000000..86b2a28 --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/DoubleColumnStatsData.java @@ -0,0 +1,84 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code DoubleColumnStatsData}. The Hive 4.x Thrift-generated class has + * both {@code setBitVectors(byte[])} and {@code setBitVectors(ByteBuffer)}, and similarly for + * {@code setHistogram}, which causes Jackson {@code InvalidDefinitionException}. This shim exposes + * only the {@code byte[]} setters, resolving the conflict. + */ +public class DoubleColumnStatsData implements Serializable { + + private double lowValue; + private double highValue; + private long numNulls; + private long numDVs; + private byte[] bitVectors; + private byte[] histogram; + + public DoubleColumnStatsData() {} + + public double getLowValue() { + return lowValue; + } + + public void setLowValue(double lowValue) { + this.lowValue = lowValue; + } + + public double getHighValue() { + return highValue; + } + + public void setHighValue(double highValue) { + this.highValue = highValue; + } + + public long getNumNulls() { + return numNulls; + } + + public void setNumNulls(long numNulls) { + this.numNulls = numNulls; + } + + public long getNumDVs() { + return numDVs; + } + + public void setNumDVs(long numDVs) { + this.numDVs = numDVs; + } + + public byte[] getBitVectors() { + return bitVectors; + } + + public void setBitVectors(byte[] bitVectors) { + this.bitVectors = bitVectors; + } + + public byte[] getHistogram() { + return histogram; + } + + public void setHistogram(byte[] histogram) { + this.histogram = histogram; + } +} diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java new file mode 100644 index 0000000..c0e4d1d --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/LongColumnStatsData.java @@ 
-0,0 +1,84 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code LongColumnStatsData}. The Hive 4.x Thrift-generated class has both + * {@code setBitVectors(byte[])} and {@code setBitVectors(ByteBuffer)}, and similarly for {@code + * setHistogram}, which causes Jackson {@code InvalidDefinitionException}. This shim exposes only + * the {@code byte[]} setters, resolving the conflict. 
+ */ +public class LongColumnStatsData implements Serializable { + + private long lowValue; + private long highValue; + private long numNulls; + private long numDVs; + private byte[] bitVectors; + private byte[] histogram; + + public LongColumnStatsData() {} + + public long getLowValue() { + return lowValue; + } + + public void setLowValue(long lowValue) { + this.lowValue = lowValue; + } + + public long getHighValue() { + return highValue; + } + + public void setHighValue(long highValue) { + this.highValue = highValue; + } + + public long getNumNulls() { + return numNulls; + } + + public void setNumNulls(long numNulls) { + this.numNulls = numNulls; + } + + public long getNumDVs() { + return numDVs; + } + + public void setNumDVs(long numDVs) { + this.numDVs = numDVs; + } + + public byte[] getBitVectors() { + return bitVectors; + } + + public void setBitVectors(byte[] bitVectors) { + this.bitVectors = bitVectors; + } + + public byte[] getHistogram() { + return histogram; + } + + public void setHistogram(byte[] histogram) { + this.histogram = histogram; + } +} diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java new file mode 100644 index 0000000..cf2c42b --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/StringColumnStatsData.java @@ -0,0 +1,74 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code StringColumnStatsData}. The Hive 4.x Thrift-generated class has + * both {@code setBitVectors(byte[])} and {@code setBitVectors(ByteBuffer)}, which causes Jackson + * {@code InvalidDefinitionException}. This shim exposes only the {@code byte[]} setter. + */ +public class StringColumnStatsData implements Serializable { + + private long maxColLen; + private double avgColLen; + private long numNulls; + private long numDVs; + private byte[] bitVectors; + + public StringColumnStatsData() {} + + public long getMaxColLen() { + return maxColLen; + } + + public void setMaxColLen(long maxColLen) { + this.maxColLen = maxColLen; + } + + public double getAvgColLen() { + return avgColLen; + } + + public void setAvgColLen(double avgColLen) { + this.avgColLen = avgColLen; + } + + public long getNumNulls() { + return numNulls; + } + + public void setNumNulls(long numNulls) { + this.numNulls = numNulls; + } + + public long getNumDVs() { + return numDVs; + } + + public void setNumDVs(long numDVs) { + this.numDVs = numDVs; + } + + public byte[] getBitVectors() { + return bitVectors; + } + + public void setBitVectors(byte[] bitVectors) { + this.bitVectors = bitVectors; + } +} diff --git a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/TimestampColumnStatsData.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/TimestampColumnStatsData.java new file mode 100644 index 0000000..238cdfe --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/api/TimestampColumnStatsData.java @@ -0,0 +1,84 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.api; + +import java.io.Serializable; + +/** + * Compatibility shim for {@code TimestampColumnStatsData}. The Hive 4.x Thrift-generated class has + * both {@code setBitVectors(byte[])} and {@code setBitVectors(ByteBuffer)}, and similarly for + * {@code setHistogram}, which causes Jackson {@code InvalidDefinitionException}. This shim exposes + * only the {@code byte[]} setters, resolving the conflict. + */ +public class TimestampColumnStatsData implements Serializable { + + private Timestamp lowValue; + private Timestamp highValue; + private long numNulls; + private long numDVs; + private byte[] bitVectors; + private byte[] histogram; + + public TimestampColumnStatsData() {} + + public Timestamp getLowValue() { + return lowValue; + } + + public void setLowValue(Timestamp lowValue) { + this.lowValue = lowValue; + } + + public Timestamp getHighValue() { + return highValue; + } + + public void setHighValue(Timestamp highValue) { + this.highValue = highValue; + } + + public long getNumNulls() { + return numNulls; + } + + public void setNumNulls(long numNulls) { + this.numNulls = numNulls; + } + + public long getNumDVs() { + return numDVs; + } + + public void setNumDVs(long numDVs) { + this.numDVs = numDVs; + } + + public byte[] getBitVectors() { + return bitVectors; + } + + public void setBitVectors(byte[] bitVectors) { + this.bitVectors = bitVectors; + } + + public byte[] getHistogram() { + return histogram; + } + + public void setHistogram(byte[] histogram) { + this.histogram = histogram; + } +} diff --git 
a/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/events/CreateTableEvent.java b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/events/CreateTableEvent.java new file mode 100644 index 0000000..ae35a9d --- /dev/null +++ b/drone-fly-app/src/main/java/org/apache/hadoop/hive/metastore/events/CreateTableEvent.java @@ -0,0 +1,60 @@ +/** + * Copyright (C) 2020-2026 Expedia, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hive.metastore.events; + +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.apache.hadoop.hive.metastore.IHMSHandler; +import org.apache.hadoop.hive.metastore.api.Table; + +/** + * Compatibility shim for {@code CreateTableEvent}. Provides both the Hive 3.x 3-argument + * constructor (required by libraries compiled against Hive 3.x, e.g. apiary-hive-events <= + * 8.1.15) and the Hive 4.x 4-argument constructor used by the rest of this application. + * + *

This class shadows the Hive jar's {@code CreateTableEvent} because it appears earlier on the + * classpath (compiled into {@code target/classes}), resolving the {@code NoSuchMethodError} thrown + * by {@code JsonMetaStoreEventSerDe$HeplerApiaryListenerEvent} at runtime. + */ +public class CreateTableEvent extends ListenerEvent { + + private final Table table; + private final boolean isReplicated; + + /** + * Compatibility constructor for libraries compiled against Hive 3.x (e.g. apiary-hive-events + * <= 8.1.15 / {@code JsonMetaStoreEventSerDe}). + */ + public CreateTableEvent(Table table, boolean status, HiveMetaStore.HMSHandler handler) { + super(status, handler); + this.table = table; + this.isReplicated = false; + } + + /** Hive 4.x constructor. */ + public CreateTableEvent(Table table, boolean status, IHMSHandler handler, boolean isReplicated) { + super(status, handler); + this.table = table; + this.isReplicated = isReplicated; + } + + public Table getTable() { + return table; + } + + public boolean isReplicated() { + return isReplicated; + } +} diff --git a/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunnerTest.java b/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunnerTest.java index 1f5f19e..9e99187 100644 --- a/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunnerTest.java +++ b/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunnerTest.java @@ -26,7 +26,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; -import org.awaitility.Duration; +import java.time.Duration; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -56,7 +56,7 @@ public void init() { public void typical() throws IOException, InterruptedException { runRunner(); await() - .atMost(Duration.FIVE_SECONDS) + .atMost(Duration.ofSeconds(5)) .untilAsserted(() -> 
{ verify(droneFlyNotificationService, atLeast(1)).notifyListeners(); } @@ -70,7 +70,7 @@ public void typicalRunWithException() throws Exception { doNothing().doThrow(new RuntimeException()).doNothing().when(droneFlyNotificationService).notifyListeners(); runRunner(); await() - .atMost(Duration.FIVE_SECONDS) + .atMost(Duration.ofSeconds(5)) .untilAsserted(() -> { verify(droneFlyNotificationService, atLeast(3)).notifyListeners(); } diff --git a/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/DroneFlyNotificationServiceTest.java b/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/DroneFlyNotificationServiceTest.java index 6b02e50..1221e51 100644 --- a/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/DroneFlyNotificationServiceTest.java +++ b/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/DroneFlyNotificationServiceTest.java @@ -28,7 +28,7 @@ import java.util.List; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.HMSHandler; import org.apache.hadoop.hive.metastore.MetaStoreEventListener; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; @@ -190,7 +190,7 @@ private void assertEvent(CreateTableEvent event) { private CreateTableEvent createTableEvent() throws MetaException { CreateTableEvent event = new CreateTableEvent( HiveTableTestUtils.createPartitionedTable("test_db", "test_table", "s3://test_location"), true, - new HMSHandler("test", new HiveConf(), false)); + new HMSHandler("test", new HiveConf()), false); return event; } diff --git a/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterServiceTest.java b/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterServiceTest.java index 
c071b53..e4c3558 100644 --- a/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterServiceTest.java +++ b/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterServiceTest.java @@ -21,12 +21,13 @@ import static org.mockito.Mockito.when; import java.util.Arrays; +import java.util.LinkedHashMap; import java.util.List; +import java.util.Map; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.HMSHandler; import org.apache.hadoop.hive.metastore.api.GetTableResult; -import org.apache.hadoop.hive.metastore.api.InsertEventRequestData; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; @@ -49,6 +50,7 @@ import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryDropTableEvent; import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryInsertEvent; import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryListenerEventFactory; +import com.expediagroup.apiary.extensions.events.metastore.event.EventType; import com.expediagroup.dataplatform.dronefly.app.service.factory.HMSHandlerFactory; public class HiveEventConverterServiceTest { @@ -180,13 +182,24 @@ public void insertEvent() throws MetaException, NoSuchObjectException { hiveEventConverterService = new HiveEventConverterService(hmsHandlerFactory); - InsertEvent InsertEvent = createInsertEvent(hmsHandlerFactory); - ApiaryInsertEvent apiaryInsertEvent = apiaryListenerEventFactory.create(InsertEvent); + // Create ApiaryInsertEvent directly (not via factory) to avoid Hive 3.x/4.x binary incompatibility + ApiaryInsertEvent apiaryInsertEvent = mock(ApiaryInsertEvent.class); + when(apiaryInsertEvent.getEventType()).thenReturn(EventType.ON_INSERT); + 
when(apiaryInsertEvent.getDatabaseName()).thenReturn(DB_NAME); + when(apiaryInsertEvent.getTableName()).thenReturn(TABLE_NAME); + Map partKeyValues = new LinkedHashMap<>(); + partKeyValues.put("partition1", "p1"); + partKeyValues.put("partition2", "p2"); + when(apiaryInsertEvent.getPartitionKeyValues()).thenReturn(partKeyValues); + when(apiaryInsertEvent.getFiles()).thenReturn(Arrays.asList("file:/a/b.txt", "file:/a/c.txt")); + when(apiaryInsertEvent.getFileChecksums()).thenReturn(Arrays.asList("123", "456")); + when(apiaryInsertEvent.getStatus()).thenReturn(true); + InsertEvent result = (InsertEvent) hiveEventConverterService.toHiveEvent(apiaryInsertEvent); assertThat(result.getHandler().getName()).isEqualTo(APP_NAME); - assertThat(result.getDb()).isEqualTo(DB_NAME); - assertThat(result.getTable()).isEqualTo(TABLE_NAME); + assertThat(result.getTableObj().getDbName()).isEqualTo(DB_NAME); + assertThat(result.getTableObj().getTableName()).isEqualTo(TABLE_NAME); assertThat(result.getFiles()).isEqualTo(Arrays.asList("file:/a/b.txt", "file:/a/c.txt")); assertThat(result.getFileChecksums()).isEqualTo(Arrays.asList("123", "456")); } @@ -197,17 +210,6 @@ public void nullEvent() throws MetaException, NoSuchObjectException { assertThat(result).isNull(); } - private InsertEvent createInsertEvent(HMSHandlerFactory hmsHandlerFactory) - throws MetaException, NoSuchObjectException { - List files = Arrays.asList("file:/a/b.txt", "file:/a/c.txt"); - List fileChecksums = Arrays.asList("123", "456"); - InsertEventRequestData insertRequestData = new InsertEventRequestData(files); - insertRequestData.setFilesAddedChecksum(fileChecksums); - InsertEvent event = new InsertEvent(DB_NAME, TABLE_NAME, PARTITION_VALUES, insertRequestData, true, - hmsHandlerFactory.newInstance()); - return event; - } - private AddPartitionEvent createAddPartitionEvent() throws MetaException { AddPartitionEvent event = new AddPartitionEvent(hiveTable, partition, true, hmsHandler); return event; @@ -215,7 +217,8 
@@ private AddPartitionEvent createAddPartitionEvent() throws MetaException { private AlterPartitionEvent createAlterPartitionEvent() throws MetaException { Partition oldPartition = HiveTableTestUtils.newPartition(hiveTable, PARTITION_VALUES, OLD_PARTITION_LOCATION); - AlterPartitionEvent event = new AlterPartitionEvent(oldPartition, partition, hiveTable, true, hmsHandler); + // Hive 4.x: (oldPartition, newPartition, table, status, isTruncateOp, writeId, handler) + AlterPartitionEvent event = new AlterPartitionEvent(oldPartition, partition, hiveTable, true, false, null, hmsHandler); return event; } @@ -225,18 +228,21 @@ private DropPartitionEvent createDropPartitionEvent() throws MetaException { } private CreateTableEvent createCreateTableEvent() throws MetaException { - CreateTableEvent event = new CreateTableEvent(hiveTable, true, hmsHandler); + // Hive 4.x: (table, status, handler, isReplicated) + CreateTableEvent event = new CreateTableEvent(hiveTable, true, hmsHandler, false); return event; } private AlterTableEvent createAlterTableEvent() throws MetaException { Table oldTable = HiveTableTestUtils.createPartitionedTable(DB_NAME, TABLE_NAME, OLD_TABLE_LOCATION); - AlterTableEvent event = new AlterTableEvent(oldTable, hiveTable, true, hmsHandler); + // Hive 4.x: (oldTable, newTable, isTruncateOp, status, writeId, handler, isReplicated) + AlterTableEvent event = new AlterTableEvent(oldTable, hiveTable, false, true, null, hmsHandler, false); return event; } private DropTableEvent createDropTableEvent() throws MetaException { - DropTableEvent event = new DropTableEvent(hiveTable, true, false, hmsHandler); + // Hive 4.x: (table, status, deleteData, handler, isReplicated) + DropTableEvent event = new DropTableEvent(hiveTable, true, false, hmsHandler, false); return event; } diff --git a/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactoryTest.java 
b/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactoryTest.java index 29558c0..01d1540 100644 --- a/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactoryTest.java +++ b/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactoryTest.java @@ -18,7 +18,7 @@ import static org.assertj.core.api.Assertions.assertThat; import org.apache.hadoop.hive.conf.HiveConf; -import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.HMSHandler; import org.apache.hadoop.hive.metastore.api.MetaException; import org.junit.jupiter.api.Test; diff --git a/drone-fly-core/src/main/java/com/expediagroup/dataplatform/dronefly/core/exception/DroneFlyException.java b/drone-fly-core/src/main/java/com/expediagroup/dataplatform/dronefly/core/exception/DroneFlyException.java index 3bf1ec5..ff96d80 100644 --- a/drone-fly-core/src/main/java/com/expediagroup/dataplatform/dronefly/core/exception/DroneFlyException.java +++ b/drone-fly-core/src/main/java/com/expediagroup/dataplatform/dronefly/core/exception/DroneFlyException.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2020 Expedia, Inc. + * Copyright (C) 2020-2026 Expedia, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/drone-fly-integration-tests/pom.xml b/drone-fly-integration-tests/pom.xml index d635ca2..3e221a0 100644 --- a/drone-fly-integration-tests/pom.xml +++ b/drone-fly-integration-tests/pom.xml @@ -26,18 +26,34 @@ jdk.tools jdk.tools + + org.slf4j + slf4j-reload4j + + + tomcat + jasper-compiler + + + tomcat + jasper-runtime + com.expediagroup.apiary kafka-metastore-listener - 6.0.2 + 8.1.15 test org.slf4j slf4j-log4j12 + + org.apache.hive + hive-metastore + @@ -70,12 +86,6 @@ org.springframework.boot spring-boot-starter-test test - - - org.junit.vintage - junit-vintage-engine - - org.junit.jupiter @@ -90,7 +100,6 @@ io.dropwizard.metrics metrics-core - ${dropwizard.version} test diff --git a/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTest.java b/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTest.java index 8cffcba..507bc3c 100644 --- a/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTest.java +++ b/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTest.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2020-2025 Expedia, Inc. + * Copyright (C) 2020-2026 Expedia, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -44,7 +44,7 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler; +import org.apache.hadoop.hive.metastore.HMSHandler; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.events.AddPartitionEvent; import org.apache.hadoop.hive.metastore.events.CreateTableEvent; @@ -52,7 +52,7 @@ import org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.common.serialization.StringDeserializer; -import org.awaitility.Duration; +import java.time.Duration; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -110,7 +110,7 @@ void setUp() throws InterruptedException { initKafkaListener(); executorService.execute(() -> DroneFly.main(new String[] {})); - await().atMost(Duration.FIVE_MINUTES).until(DroneFly::isRunning); + await().atMost(Duration.ofMinutes(5)).until(DroneFly::isRunning); } @AfterEach @@ -129,7 +129,7 @@ public void typical() { AddPartitionEvent addPartitionEvent = new AddPartitionEvent(buildTable(), buildPartition(), true, hmsHandler); kafkaMetaStoreEventListener.onAddPartition(addPartitionEvent); - CreateTableEvent createTableEvent = new CreateTableEvent(buildTable(), true, hmsHandler); + CreateTableEvent createTableEvent = new CreateTableEvent(buildTable(), true, hmsHandler, false); kafkaMetaStoreEventListener.onCreateTable(createTableEvent); await().atMost(5, TimeUnit.SECONDS).until(() -> DummyListener.getNumEvents() > 1); diff --git a/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTestUtils.java b/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTestUtils.java index 138425f..2f621c2 100644 --- 
a/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTestUtils.java +++ b/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTestUtils.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2020 Expedia, Inc. + * Copyright (C) 2020-2026 Expedia, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -55,7 +55,10 @@ public static Partition buildPartition(String partitionName) { values.add(partitionName + "2"); StorageDescriptor sd = new StorageDescriptor(); sd.setStoredAsSubDirectories(false); - return new Partition(values, DATABASE, TABLE, 1, 1, sd, buildTableParameters()); + Partition partition = new Partition(values, DATABASE, TABLE, 1, 1, sd, buildTableParameters()); + partition.setWriteId(-1); + partition.setIsStatsCompliant(false); + return partition; } public static Map buildTableParameters() { diff --git a/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DummyListener.java b/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DummyListener.java index dbb9590..1ff92bd 100644 --- a/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DummyListener.java +++ b/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DummyListener.java @@ -1,5 +1,5 @@ /** - * Copyright (C) 2020-2025 Expedia, Inc. + * Copyright (C) 2020 Expedia, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
diff --git a/pom.xml b/pom.xml index 41d0e76..00accf7 100644 --- a/pom.xml +++ b/pom.xml @@ -22,26 +22,32 @@ - 3.12.2 - 3.1.6 - 5.6.0 - 3.12.4 - 2.7.10 - 5.3.25 - 1.2.3 + + 21 + 21 + 21 + 21 + 21 + + + true + 0.8.12 + 3.2.5 + 3.13.0 + 4.9.8.2 + + 3.2.12 3.2.4 3.4.3 - openjdk - 8-jdk + amazoncorretto + 21-al2023 ${docker.from.image}:${docker.from.tag} expediagroup ${project.artifactId} ${dockerhub.url}/${docker.registry}/${docker.to.image}:${docker.to.tag} ${project.version} docker.io - 2.17.1 - 1.1.9 - 3.1.0 + 2.2.0 true @@ -55,26 +61,6 @@ - - org.springframework - spring-core - ${springframework.version} - - - org.springframework - spring-context - ${springframework.version} - - - org.springframework - spring-beans - ${springframework.version} - - - org.springframework - spring-aop - ${springframework.version} - org.springframework.boot spring-boot-dependencies @@ -82,55 +68,6 @@ pom import - - ch.qos.logback - logback-core - ${logback.version} - - - - - org.apache.logging.log4j - log4j-core - ${log4j2.version} - - - org.apache.logging.log4j - log4j-api - ${log4j2.version} - - - org.apache.logging.log4j - log4j-web - ${log4j2.version} - - - org.apache.logging.log4j - log4j-jul - ${log4j2.version} - - - org.apache.logging.log4j - log4j-slf4j-impl - ${log4j2.version} - - - org.apache.logging.log4j - log4j-1.2-api - ${log4j2.version} - - - org.apache.logging.log4j - log4j-to-slf4j - ${log4j2.version} - - - - org.awaitility - awaitility - ${awaitility.version} - test - @@ -138,29 +75,25 @@ org.junit.jupiter junit-jupiter - ${junit.jupiter.version} test org.mockito mockito-core - ${mockito.version} test org.mockito mockito-junit-jupiter - ${mockito.version} test org.assertj assertj-core - ${assertj.version} test - + @@ -196,10 +129,10 @@ arm64 linux - + - ${docker.to.reference} + ${docker.to.reference} ${DOCKERHUB_USERNAME} ${DOCKERHUB_PASSWORD} @@ -211,6 +144,17 @@ ${docker.container.port} + + --add-opens=java.base/java.lang=ALL-UNNAMED + 
--add-opens=java.base/java.lang.reflect=ALL-UNNAMED + --add-opens=java.base/java.io=ALL-UNNAMED + --add-opens=java.base/java.net=ALL-UNNAMED + --add-opens=java.base/java.nio=ALL-UNNAMED + --add-opens=java.base/java.util=ALL-UNNAMED + --add-opens=java.base/java.util.concurrent=ALL-UNNAMED + --add-opens=java.base/sun.nio.ch=ALL-UNNAMED + --add-opens=java.base/java.security=ALL-UNNAMED + @@ -219,6 +163,33 @@ maven-shade-plugin ${maven.shade.plugin.version} + + org.apache.maven.plugins + maven-compiler-plugin + ${maven.compiler.plugin.version} + + 21 + + + + org.apache.maven.plugins + maven-surefire-plugin + ${maven.surefire.plugin.version} + + + @{argLine} + --add-opens java.base/java.lang=ALL-UNNAMED + --add-opens java.base/java.lang.reflect=ALL-UNNAMED + --add-opens java.base/java.io=ALL-UNNAMED + --add-opens java.base/java.net=ALL-UNNAMED + --add-opens java.base/java.nio=ALL-UNNAMED + --add-opens java.base/java.util=ALL-UNNAMED + --add-opens java.base/java.util.concurrent=ALL-UNNAMED + --add-opens java.base/sun.nio.ch=ALL-UNNAMED + --add-opens java.base/java.security=ALL-UNNAMED + + + com.mycila license-maven-plugin @@ -247,7 +218,7 @@ hive-common - + false exec org.mortbay.jetty,org.eclipse.jetty,org.eclipse.jetty.aggregate,org.eclipse.jetty.orbit @@ -257,10 +228,6 @@ org.codehaus.jettison jettison - - javax.servlet - servlet-api -