Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 9 additions & 3 deletions sdks/java/io/hcatalog/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -54,16 +54,18 @@ dependencies {
// Calcite (a dependency of Hive) bundles without repackaging Guava which is why we redeclare it
// here so that it appears on the compile/test/runtime classpath before Calcite.
provided library.java.hadoop_common
provided "org.apache.hive:hive-exec:$hive_version"
provided project(path: ":vendor:hive-exec-4_0_1", configuration: "relocated")
provided(group: "org.apache.hive.hcatalog", name: "hive-hcatalog-core", version: hive_version) {
exclude group: "org.apache.hive", module: "hive-exec"
exclude group: "com.google.protobuf", module: "protobuf-java"
}
testImplementation library.java.commons_io
testImplementation library.java.junit
testImplementation library.java.hamcrest
testImplementation "org.apache.hive.hcatalog:hive-hcatalog-core:$hive_version:tests"
testImplementation "org.apache.hive:hive-exec:$hive_version"
testImplementation("org.apache.hive.hcatalog:hive-hcatalog-core:$hive_version:tests") {
exclude group: "org.apache.hive", module: "hive-exec"
}
testImplementation project(path: ":vendor:hive-exec-4_0_1", configuration: "relocated")
// datanucleus dependency version should be in alignment with managed dependencies of hive-standalone-metastore
testRuntimeOnly 'org.datanucleus:datanucleus-api-jdo:5.2.8'
testRuntimeOnly 'org.datanucleus:datanucleus-rdbms:5.2.10'
Expand All @@ -81,6 +83,10 @@ dependencies {
}
}

configurations.all {
exclude group: "org.apache.hive", module: "hive-exec"
}

hadoopVersions.each {kv ->
configurations."hadoopVersion$kv.key" {
resolutionStrategy {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -426,4 +426,17 @@ private void prepareTestData() throws Exception {
reCreateTestTable();
insertTestData(getConfigPropertiesAsMap(service.getHiveConf()));
}

@Test
public void testJodaTimeShading() {
org.joda.time.Instant instant = org.joda.time.Instant.ofEpochMilli(123L);
assertEquals(123L, instant.getMillis());
}

@Test
public void testJodaTimeRelocation() {
org.apache.beam.vendor.hive_exec.v4_0_1.org.joda.time.Instant instant =
new org.apache.beam.vendor.hive_exec.v4_0_1.org.joda.time.Instant(123L);
org.junit.Assert.assertEquals(123L, instant.getMillis());
}
}
1 change: 1 addition & 0 deletions settings.gradle.kts
Original file line number Diff line number Diff line change
Expand Up @@ -334,6 +334,7 @@ include(":sdks:typescript:container")
include(":vendor:grpc-1_69_0")
include(":vendor:calcite-1_40_0")
include(":vendor:guava-32_1_2-jre")
include(":vendor:hive-exec-4_0_1")
include(":website")
include(":runners:google-cloud-dataflow-java:worker")
include(":runners:google-cloud-dataflow-java:worker:windmill")
Expand Down
53 changes: 53 additions & 0 deletions vendor/hive-exec-4_0_1/build.gradle
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
// Builds a vendored copy of hive-exec with conflicting packages relocated (see shadowJar below).
plugins {
    id 'com.gradleup.shadow'
    id 'java-library'
}

def hive_version = "4.0.1" // Keep in sync with sdks/java/io/hcatalog/build.gradle

repositories {
    mavenCentral()
}

dependencies {
    // Pull in only the hive-exec artifact itself (no transitive dependencies): this
    // project's sole purpose is to repackage that one jar with relocations applied.
    implementation("org.apache.hive:hive-exec:$hive_version") {
        transitive = false // We only want the fat jar
    }
}

shadowJar {
    // Relocate the problematic packages to a unique, version-stamped prefix
    // (4.0.1 -> v4_0_1) so they cannot clash with other Joda-Time copies on the classpath.
    def relocatedVersion = hive_version.replace('.', '_')
    relocate 'org.joda.time', "org.apache.beam.vendor.hive_exec.v${relocatedVersion}.org.joda.time"

    // HiveParser.class is excluded here and re-added, unmodified, by the finalJar task
    // (presumably because rewriting that very large generated class is problematic —
    // TODO confirm the original motivation).
    exclude '**/HiveParser.class'

    archiveClassifier.set('shadow')
}

// Extracts the unrelocated HiveParser.class from the hive-exec jar so that finalJar
// can merge it back into the relocated shadow jar (shadowJar excludes it).
tasks.register('extractHiveParser', Copy) {
    // Resolve the hive-exec jar inside a closure so the configuration is only resolved
    // at execution time; the previous code resolved compileClasspath/runtimeClasspath
    // eagerly at configuration time, which defeats lazy configuration and the
    // configuration cache.
    from({
        def hiveExecJars = configurations.compileClasspath.findAll { it.name.contains('hive-exec') }
        if (hiveExecJars.isEmpty()) {
            hiveExecJars = configurations.runtimeClasspath.findAll { it.name.contains('hive-exec') }
        }
        zipTree(hiveExecJars.first())
    }) {
        include '**/HiveParser.class'
    }
    // Same output location as before ("build/extracted"); layout.buildDirectory replaces
    // the deprecated $buildDir property.
    into layout.buildDirectory.dir('extracted')
}

// Assembles the artifact published via the "relocated" configuration: the relocated
// shadow jar plus the original (unrelocated) HiveParser.class copied by extractHiveParser.
tasks.register('finalJar', Jar) {
    dependsOn shadowJar, extractHiveParser
    archiveClassifier.set('')
    // The shadow jar tree and the extracted tree can carry overlapping entries
    // (e.g. shared package directories); keep the first occurrence rather than
    // failing or emitting duplicate zip entries.
    duplicatesStrategy = DuplicatesStrategy.EXCLUDE
    from zipTree(shadowJar.archiveFile)
    // Same location extractHiveParser writes to ("build/extracted"); layout.buildDirectory
    // replaces the deprecated $buildDir property.
    from layout.buildDirectory.dir('extracted')
}

// Consumers resolve the relocated jar through this configuration, e.g.
// project(path: ":vendor:hive-exec-4_0_1", configuration: "relocated").
configurations {
    relocated
}

artifacts {
    relocated(finalJar)
}

Loading