
Commit e324c30

rebase

1 parent: 7c3bbf1

3 files changed: +19 −5 lines changed

project/SparkBuild.scala

Lines changed: 3 additions & 1 deletion

@@ -362,6 +362,8 @@ object SparkBuild extends PomBuild {
       "-sourcepath", (ThisBuild / baseDirectory).value.getAbsolutePath // Required for relative source links in scaladoc
     ),

+    (Test / javaOptions) += s"-Dmvn.executable=${BuildCommons.sparkHome.getAbsolutePath}/build/mvn",
+
     SbtPomKeys.profiles := profiles,

     // Remove certain packages from Scaladoc
@@ -1305,7 +1307,7 @@ object SQL {
     // even if the project is not using it.
     PB.protocVersion := BuildCommons.protoVersion,
     // For some reason the resolution from the imported Maven build does not work for some
-    // of these dependendencies that we need to shade later on.
+    // of these dependencies that we need to shade later on.
     libraryDependencies ++= {
       Seq(
         "com.google.protobuf" % "protobuf-java" % protoVersion % "protobuf"

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala

Lines changed: 0 additions & 1 deletion

@@ -29,7 +29,6 @@ import jline.console.history.FileHistory
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.hive.cli.{CliDriver, CliSessionState, OptionsProcessor}
 import org.apache.hadoop.hive.common.HiveInterruptUtils
-import org.apache.hadoop.hive.common.io.SessionStream
 import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.ql.Driver
 import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper

sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala

Lines changed: 16 additions & 3 deletions

@@ -17,24 +17,37 @@

 package org.apache.spark.sql.hive.thriftserver

-import java.io.PrintStream
+import java.io.{PrintStream, UnsupportedEncodingException}
 import java.nio.charset.StandardCharsets.UTF_8

+import scala.sys.exit
+
+import org.apache.hadoop.hive.common.io.SessionStream
+
 import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.hive.HiveExternalCatalog
 import org.apache.spark.sql.hive.HiveUtils._
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION
+import org.apache.spark.util.SparkExitCode.ERROR_PATH_NOT_FOUND
 import org.apache.spark.util.Utils

 /** A singleton object for the master program. The executors should not access this. */
 private[hive] object SparkSQLEnv extends Logging {
   logDebug("Initializing SparkSQLEnv")

-  val out = new PrintStream(System.out, true, UTF_8)
-  val err = new PrintStream(System.err, true, UTF_8)
+  private def createSessionStream(s: PrintStream): SessionStream = {
+    try {
+      new SessionStream(s, true, UTF_8.name())
+    } catch {
+      case e: UnsupportedEncodingException => exit(ERROR_PATH_NOT_FOUND)
+    }
+  }
+
+  val out: SessionStream = createSessionStream(System.out)
+  val err: SessionStream = createSessionStream(System.err)

   var sparkSession: SparkSession = _
   var sparkContext: SparkContext = _
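
The change above swaps the bare PrintStream fields for Hive's SessionStream and terminates the process if the charset name is rejected. A self-contained sketch of that construct-or-exit pattern using only JDK types (ConsoleStreams, streamOrExit, and the exit code 1 are illustrative stand-ins, not the commit's actual names):

import java.io.{PrintStream, UnsupportedEncodingException}

object ConsoleStreams {
  // Build an auto-flushing stream for the given charset name; if the JVM does
  // not support the encoding, exit with a fixed code rather than continue with
  // a broken console stream.
  def streamOrExit(target: PrintStream, charset: String): PrintStream =
    try new PrintStream(target, true, charset)
    catch {
      case _: UnsupportedEncodingException => sys.exit(1) // stand-in for ERROR_PATH_NOT_FOUND
    }

  val out: PrintStream = streamOrExit(System.out, "UTF-8")
  val err: PrintStream = streamOrExit(System.err, "UTF-8")
}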
