Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,17 @@ object StaticSQLConf {
.booleanConf
.createWithDefault(false)

// SPARK-54293: opt-in switch for Jetty's SNI host check on the Thrift HTTP CLI service's
// HTTPS connector. Static conf: read once at server start (see HiveThriftServer2), so
// changing it requires a restart. Defaults to false for backward compatibility with the
// pre-Jetty-10 behavior preserved by SPARK-45522.
val HIVE_THRIFT_SERVER_HTTP_SNI_HOST_CHECK_ENABLED =
buildStaticConf("spark.sql.hive.thriftServer.http.sniHostCheckEnabled")
.internal()
.doc("Whether to enable Jetty's SNI host check on the ThriftHttpCLIService HTTPS " +
"connector. Since SPARK-45522 (Jetty 10+), Spark has disabled SNI host check to " +
"preserve backward compatibility. Set to true to enforce SNI host checking for " +
"stricter security. See SPARK-54293.")
.version("4.2.0")
.booleanConf
.createWithDefault(false)

val SPARK_SESSION_EXTENSIONS = buildStaticConf("spark.sql.extensions")
.doc("A comma-separated list of classes that implement " +
"Function1[SparkSessionExtensions, Unit] used to configure Spark Session extensions. The " +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@
import org.eclipse.jetty.server.AbstractConnectionFactory;
import org.eclipse.jetty.server.ConnectionFactory;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.SecureRequestCustomizer;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
import org.eclipse.jetty.ee10.servlet.ServletHolder;
Expand All @@ -50,9 +52,11 @@
public class ThriftHttpCLIService extends ThriftCLIService {

protected org.eclipse.jetty.server.Server httpServer;
private final boolean sniHostCheckEnabled;

public ThriftHttpCLIService(CLIService cliService) {
/**
 * Creates the HTTP-transport Thrift CLI service.
 *
 * @param cliService the underlying CLIService this Thrift front-end delegates to
 * @param sniHostCheckEnabled whether Jetty's SNI host check is enforced on the HTTPS
 *        connector (SPARK-54293); consumed later when the Jetty server is initialized
 */
public ThriftHttpCLIService(CLIService cliService, boolean sniHostCheckEnabled) {
super(cliService, ThriftHttpCLIService.class.getSimpleName());
this.sniHostCheckEnabled = sniHostCheckEnabled;
}

@Override
Expand Down Expand Up @@ -91,8 +95,15 @@ protected void initializeServer() {
Arrays.toString(sslContextFactoryServer.getExcludeProtocols()));
sslContextFactoryServer.setKeyStorePath(keyStorePath);
sslContextFactoryServer.setKeyStorePassword(keyStorePassword);
// SPARK-54293: Configure SNI host check, which defaults to true since Jetty 10.
// Controlled by spark.sql.hive.thriftServer.http.sniHostCheckEnabled (default: false),
// consistent with the fix in JettyUtils.scala (SPARK-45522).
HttpConfiguration httpConfig = new HttpConfiguration();
SecureRequestCustomizer src = new SecureRequestCustomizer();
src.setSniHostCheck(sniHostCheckEnabled);
httpConfig.addCustomizer(src);
connectionFactories = AbstractConnectionFactory.getFactories(
sslContextFactoryServer, new HttpConnectionFactory());
sslContextFactoryServer, new HttpConnectionFactory(httpConfig));
} else {
connectionFactories = new ConnectionFactory[] { new HttpConnectionFactory() };
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ import org.apache.spark.sql.{SparkSession, SQLContext}
import org.apache.spark.sql.hive.HiveUtils
import org.apache.spark.sql.hive.thriftserver.ReflectionUtils._
import org.apache.spark.sql.hive.thriftserver.ui._
import org.apache.spark.sql.internal.StaticSQLConf
import org.apache.spark.status.ElementTrackingStore
import org.apache.spark.util.{ShutdownHookManager, Utils}

Expand Down Expand Up @@ -158,7 +159,9 @@ private[hive] class HiveThriftServer2(sparkSession: SparkSession)
addService(sparkSqlCliService)

val thriftCliService = if (isHTTPTransportMode(hiveConf)) {
new ThriftHttpCLIService(sparkSqlCliService)
val sniHostCheckEnabled = sparkSession.conf.get(
StaticSQLConf.HIVE_THRIFT_SERVER_HTTP_SNI_HOST_CHECK_ENABLED)
new ThriftHttpCLIService(sparkSqlCliService, sniHostCheckEnabled)
} else {
new ThriftBinaryCLIService(sparkSqlCliService)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.hive.thriftserver

import org.eclipse.jetty.server.{AbstractConnectionFactory, HttpConfiguration, HttpConnectionFactory, SecureRequestCustomizer}
import org.eclipse.jetty.util.ssl.SslContextFactory

import org.apache.spark.SparkFunSuite

class ThriftHttpCLIServiceSuite extends SparkFunSuite {

  /**
   * Mirrors the SSL connection-factory wiring performed by
   * ThriftHttpCLIService.initializeServer() for the given sniHostCheck value and
   * returns the SecureRequestCustomizer installed on the resulting HttpConfiguration.
   */
  private def buildSslFactoriesAndGetCustomizer(
      sniHostCheckEnabled: Boolean): SecureRequestCustomizer = {
    val sslFactory = new SslContextFactory.Server()
    val config = new HttpConfiguration()
    val customizer = new SecureRequestCustomizer()
    customizer.setSniHostCheck(sniHostCheckEnabled)
    config.addCustomizer(customizer)

    val factories = AbstractConnectionFactory.getFactories(
      sslFactory, new HttpConnectionFactory(config))

    val httpFactory = factories
      .collectFirst { case f: HttpConnectionFactory => f }
      .getOrElse(fail("HttpConnectionFactory not found in SSL connection factories"))

    httpFactory.getHttpConfiguration.getCustomizers.toArray
      .collectFirst { case c: SecureRequestCustomizer => c }
      .getOrElse(fail("SecureRequestCustomizer not found in HttpConfiguration"))
  }

  test("SPARK-54293: SNI host check disabled by default") {
    // Default behavior: sniHostCheckEnabled = false
    val customizer = buildSslFactoriesAndGetCustomizer(sniHostCheckEnabled = false)
    assert(!customizer.isSniHostCheck,
      "SNI host check should be disabled when sniHostCheckEnabled is false")
  }

  test("SPARK-54293: SNI host check enabled when configured") {
    // Opt-in behavior: sniHostCheckEnabled = true
    val customizer = buildSslFactoriesAndGetCustomizer(sniHostCheckEnabled = true)
    assert(customizer.isSniHostCheck,
      "SNI host check should be enabled when sniHostCheckEnabled is true")
  }

  test("SPARK-54293: SSL connection factories without fix have SNI host check enabled") {
    // Demonstrate that without the fix (no SecureRequestCustomizer added by Spark),
    // Jetty 10+ defaults to sniHostCheck=true, which causes the bug.
    val factories = AbstractConnectionFactory.getFactories(
      new SslContextFactory.Server(), new HttpConnectionFactory())

    val httpFactory = factories
      .collectFirst { case f: HttpConnectionFactory => f }
      .getOrElse(fail("HttpConnectionFactory not found"))

    // Without the fix, either there's no SecureRequestCustomizer at all
    // (Jetty adds one with defaults, sniHostCheck=true), or the one present
    // carries the Jetty 10+ default of sniHostCheck=true.
    httpFactory.getHttpConfiguration.getCustomizers.toArray
      .collectFirst { case c: SecureRequestCustomizer => c }
      .foreach { src =>
        assert(src.isSniHostCheck,
          "Default Jetty 10+ behavior should have SNI host check enabled")
      }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -648,6 +648,7 @@ spark.sql.hive.metastorePartitionPruningFastFallback
spark.sql.hive.metastorePartitionPruningInSetThreshold
spark.sql.hive.tablePropertyLengthThreshold
spark.sql.hive.thriftServer.async
spark.sql.hive.thriftServer.http.sniHostCheckEnabled
spark.sql.hive.thriftServer.singleSession
spark.sql.hive.useDelegateForSymlinkTextInputFormat
spark.sql.hive.version
Expand Down