Skip to content

Commit cbcd2c6

Browse files
committed
review comments
1 parent 5e7d25e commit cbcd2c6

File tree

6 files changed

+90
-24
lines changed

6 files changed

+90
-24
lines changed

dev/test-dependencies.sh

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ HADOOP_MODULE_PROFILES="-Phive-thriftserver -Pkubernetes -Pyarn -Phive \
3434
-Pspark-ganglia-lgpl -Pkinesis-asl -Phadoop-cloud -Pjvm-profiler"
3535
MVN="build/mvn"
3636
HADOOP_HIVE_PROFILES=(
37-
hadoop-3-hive-4.0
37+
hadoop-3-hive-4.1
3838
)
3939

4040
MVN_EXEC_PLUGIN_VERSION=$(build/mvn help:evaluate \
@@ -85,7 +85,7 @@ $MVN -q versions:set -DnewVersion=$TEMP_VERSION -DgenerateBackupPoms=false > /de
8585

8686
# Generate manifests for each Hadoop profile:
8787
for HADOOP_HIVE_PROFILE in "${HADOOP_HIVE_PROFILES[@]}"; do
88-
if [[ $HADOOP_HIVE_PROFILE == **hadoop-3-hive-4.0** ]]; then
88+
if [[ $HADOOP_HIVE_PROFILE == **hadoop-3-hive-4.1** ]]; then
8989
HADOOP_PROFILE=hadoop-3
9090
fi
9191
echo "Performing Maven install for $HADOOP_HIVE_PROFILE"

pom.xml

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1996,6 +1996,34 @@
19961996
<groupId>com.jayway.jsonpath</groupId>
19971997
<artifactId>json-path</artifactId>
19981998
</exclusion>
1999+
<exclusion>
2000+
<groupId>com.amazonaws.secretsmanager</groupId>
2001+
<artifactId>aws-secretsmanager-caching-java</artifactId>
2002+
</exclusion>
2003+
<exclusion>
2004+
<groupId>org.graalvm.js</groupId>
2005+
<artifactId>js-scriptengine</artifactId>
2006+
</exclusion>
2007+
<exclusion>
2008+
<groupId>org.graalvm.js</groupId>
2009+
<artifactId>js</artifactId>
2010+
</exclusion>
2011+
<exclusion>
2012+
<groupId>org.apache.atlas</groupId>
2013+
<artifactId>atlas-client-v2</artifactId>
2014+
</exclusion>
2015+
<exclusion>
2016+
<groupId>org.apache.atlas</groupId>
2017+
<artifactId>atlas-client-common</artifactId>
2018+
</exclusion>
2019+
<exclusion>
2020+
<groupId>com.amazonaws.secretsmanager</groupId>
2021+
<artifactId>aws-secretsmanager-caching-java</artifactId>
2022+
</exclusion>
2023+
<exclusion>
2024+
<groupId>org.apache.commons</groupId>
2025+
<artifactId>commons-dbcp2</artifactId>
2026+
</exclusion>
19992027
</exclusions>
20002028
</dependency>
20012029
<dependency>
@@ -2383,6 +2411,22 @@
23832411
<groupId>com.github.joshelser</groupId>
23842412
<artifactId>dropwizard-metrics-hadoop-metrics2-reporter</artifactId>
23852413
</exclusion>
2414+
<exclusion>
2415+
<groupId>org.apache.orc</groupId>
2416+
<artifactId>orc-core</artifactId>
2417+
</exclusion>
2418+
<exclusion>
2419+
<groupId>io.grpc</groupId>
2420+
<artifactId>grpc-netty-shaded</artifactId>
2421+
</exclusion>
2422+
<exclusion>
2423+
<groupId>io.grpc</groupId>
2424+
<artifactId>grpc-protobuf</artifactId>
2425+
</exclusion>
2426+
<exclusion>
2427+
<groupId>io.grpc</groupId>
2428+
<artifactId>grpc-stub</artifactId>
2429+
</exclusion>
23862430
</exclusions>
23872431
</dependency>
23882432
<dependency>

project/SparkBuild.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1898,7 +1898,6 @@ object TestSettings {
18981898
}.getOrElse(Nil): _*),
18991899
// Show full stack trace and duration in test cases.
19001900
(Test / testOptions) += Tests.Argument("-oDF"),
1901-
(Test / testOptions) += Tests.Argument(TestFrameworks.ScalaTest, "-fG", "scalatest.txt"),
19021901
// Slowpoke notifications: receive notifications every 5 minute of tests that have been running
19031902
// longer than two minutes.
19041903
(Test / testOptions) += Tests.Argument(TestFrameworks.ScalaTest, "-W", "120", "300"),

repl/src/test/scala/org/apache/spark/repl/SparkShellSuite.scala

Lines changed: 17 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -95,25 +95,6 @@ class SparkShellSuite extends SparkFunSuite {
9595
}
9696
}
9797

98-
def handleException(cause: Throwable): Unit = lock.synchronized {
99-
val message =
100-
s"""
101-
|=======================
102-
|SparkShellSuite failure output
103-
|=======================
104-
|Spark Shell command line: ${command.mkString(" ")}
105-
|Exception: $cause
106-
|Failed to capture next expected output "${expectedAnswers(next)}" within $timeout.
107-
|
108-
|${buffer.mkString("\n")}
109-
|===========================
110-
|End SparkShellSuite failure output
111-
|===========================
112-
""".stripMargin
113-
logError(message, cause)
114-
fail(message, cause)
115-
}
116-
11798
val process = new ProcessBuilder(command: _*).start()
11899

119100
val stdinWriter = new OutputStreamWriter(process.getOutputStream, StandardCharsets.UTF_8)
@@ -138,7 +119,23 @@ class SparkShellSuite extends SparkFunSuite {
138119
}
139120
ThreadUtils.awaitResult(foundAllExpectedAnswers.future, timeoutForQuery)
140121
log.info("Found all expected output.")
141-
} catch { case cause: Throwable => handleException(cause)
122+
} catch { case cause: Throwable =>
123+
val message =
124+
s"""
125+
|=======================
126+
|SparkShellSuite failure output
127+
|=======================
128+
|Spark Shell command line: ${command.mkString(" ")}
129+
|Exception: $cause
130+
|Failed to capture next expected output "${expectedAnswers(next)}" within $timeout.
131+
|
132+
|${buffer.mkString("\n")}
133+
|===========================
134+
|End SparkShellSuite failure output
135+
|===========================
136+
""".stripMargin
137+
logError(message, cause)
138+
fail(message, cause)
142139
} finally {
143140
if (!process.waitFor(1, MINUTES)) {
144141
try {

sql/hive/pom.xml

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -165,6 +165,32 @@
165165
<groupId>org.apache.calcite</groupId>
166166
<artifactId>calcite-core</artifactId>
167167
<scope>${calcite.deps.scope}</scope>
168+
<exclusions>
169+
<exclusion>
170+
<groupId>org.locationtech.jts.io</groupId>
171+
<artifactId>jts-io-common</artifactId>
172+
</exclusion>
173+
<exclusion>
174+
<groupId>com.google.uzaygezen</groupId>
175+
<artifactId>uzaygezen-core</artifactId>
176+
</exclusion>
177+
<exclusion>
178+
<groupId>com.jayway.jsonpath</groupId>
179+
<artifactId>json-path</artifactId>
180+
</exclusion>
181+
<exclusion>
182+
<groupId>com.yahoo.datasketches</groupId>
183+
<artifactId>sketches-core</artifactId>
184+
</exclusion>
185+
<exclusion>
186+
<groupId>org.apiguardian</groupId>
187+
<artifactId>apiguardian-api</artifactId>
188+
</exclusion>
189+
<exclusion>
190+
<groupId>net.hydromatic</groupId>
191+
<artifactId>aggdesigner-algorithm</artifactId>
192+
</exclusion>
193+
</exclusions>
168194
</dependency>
169195
<dependency>
170196
<groupId>org.apache.hadoop</groupId>

sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveVersionSuite.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ private[client] abstract class HiveVersionSuite(version: String) extends SparkFu
3535
hadoopConf.set("datanucleus.autoStartMechanismMode", "ignored")
3636
hadoopConf.set("hive.metastore.schema.verification", "false")
3737
// Since Hive 3.0, HIVE-19310 skipped `ensureDbInit` if `hive.in.test=false`.
38-
if (version == "3.0" || version == "3.1" || version == "4.1") {
38+
if (Seq("3.0", "3.1", "4.0", "4.1").contains(version)) {
3939
hadoopConf.set("hive.in.test", "true")
4040
}
4141
HiveClientBuilder.buildClient(version, hadoopConf)

0 commit comments

Comments
 (0)