
Commit cd95f5d

code review
1 parent 3f96149

3 files changed: 6 additions, 96 deletions

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DaysWritableV2.scala

Lines changed: 0 additions & 89 deletions
This file was deleted.

sql/hive/src/main/scala/org/apache/spark/sql/hive/TableReader.scala

Lines changed: 6 additions & 6 deletions
@@ -110,7 +110,7 @@ class HadoopTableReader(
    */
   def makeRDDForTable(
       hiveTable: HiveTable,
-      abstractSerDeClass: Class[_ <: AbstractSerDe],
+      deserializerClass: Class[_ <: AbstractSerDe],
       filterOpt: Option[PathFilter]): RDD[InternalRow] = {

     assert(!hiveTable.isPartitioned,
@@ -133,11 +133,11 @@ class HadoopTableReader(

     val deserializedHadoopRDD = hadoopRDD.mapPartitions { iter =>
       val hconf = broadcastedHadoopConf.value.value
-      val abstractSerDe = abstractSerDeClass.getConstructor().newInstance()
+      val deserializer = deserializerClass.getConstructor().newInstance()
       DeserializerLock.synchronized {
-        abstractSerDe.initialize(hconf, localTableDesc.getProperties, null)
+        deserializer.initialize(hconf, localTableDesc.getProperties, null)
       }
-      HadoopTableReader.fillObject(iter, abstractSerDe, attrsWithIndex, mutableRow, abstractSerDe)
+      HadoopTableReader.fillObject(iter, deserializer, attrsWithIndex, mutableRow, deserializer)
     }

     deserializedHadoopRDD
@@ -181,7 +181,7 @@ class HadoopTableReader(
     }

     val broadcastedHiveConf = _broadcastedHadoopConf
-    val localAbstractSerDeClass = partDeserializerClass
+    val localDeserializerClass = partDeserializerClass
     val mutableRow = new SpecificInternalRow(attributes.map(_.dataType))

     // Splits all attributes into two groups, partition key attributes and those that are not.
@@ -210,7 +210,7 @@ class HadoopTableReader(

     createHadoopRDD(partDesc, inputPathStr).mapPartitions { iter =>
       val hconf = broadcastedHiveConf.value.value
-      val deserializer = localAbstractSerDeClass.getConstructor().newInstance()
+      val deserializer = localDeserializerClass.getConstructor().newInstance()
         .asInstanceOf[AbstractSerDe]
       // SPARK-13709: For SerDes like AvroSerDe, some essential information (e.g. Avro schema
       // information) may be defined in table properties. Here we should merge table properties
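
The rename above is mechanical, but the pattern these hunks touch is worth isolating: a SerDe class token is shipped to executors, each partition instantiates it through the no-arg constructor, and initialization is serialized through a global lock (Hive SerDe initialization is not safe to run concurrently). A minimal self-contained sketch of that pattern follows; the Deserializer trait, DummyDeserializer, and the local DeserializerLock object are stand-ins invented so the sketch compiles without a Hive dependency, not code from this commit.

import java.util.Properties

// Stand-ins for Hive's AbstractSerDe and Spark's DeserializerLock, assumed
// here so the sketch compiles without a Hive dependency.
trait Deserializer {
  def initialize(props: Properties): Unit
}

class DummyDeserializer extends Deserializer {
  override def initialize(props: Properties): Unit =
    println(s"initialized with ${props.size()} table properties")
}

object DeserializerLock

object ReflectiveDeserializer {
  // Mirrors the diff: a class token travels to the executor, and each
  // partition builds its own instance via the no-arg constructor.
  def newInstance(clazz: Class[_ <: Deserializer], props: Properties): Deserializer = {
    val deserializer = clazz.getConstructor().newInstance()
    // Initialization goes through a global lock, as in TableReader.
    DeserializerLock.synchronized {
      deserializer.initialize(props)
    }
    deserializer
  }

  def main(args: Array[String]): Unit = {
    newInstance(classOf[DummyDeserializer], new Properties())
  }
}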

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveShim.scala

Lines changed: 0 additions & 1 deletion
@@ -520,7 +520,6 @@ private[client] class Shim_v2_0 extends Shim with Logging {
   override def alterTable(hive: Hive, tableName: String, table: Table): Unit = {
     recordHiveCall()
     alterTableMethod.invoke(hive, tableName, table)
-    // hive.alterTable(tableName, table, )
   }

   override def alterPartitions(hive: Hive, tableName: String, newParts: JList[Partition]): Unit = {
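
The deleted line was a leftover commented-out call. The surrounding method shows the reflective-call pattern the shim uses: resolve a java.lang.reflect.Method handle once, then invoke it against the live Hive client, so signature differences across Hive versions surface at lookup time rather than at every call site. A rough sketch of that pattern, where FakeHive and its alterTable signature are hypothetical stand-ins for org.apache.hadoop.hive.ql.metadata.Hive rather than Spark's actual client:

import java.lang.reflect.Method

// A hypothetical stand-in for the Hive client the shim reflects against.
class FakeHive {
  def alterTable(tableName: String, table: String): Unit =
    println(s"alterTable($tableName, $table)")
}

object ShimSketch {
  // Resolve the method once; a missing or changed signature fails here,
  // at lookup time, instead of at every call site.
  private val alterTableMethod: Method =
    classOf[FakeHive].getMethod("alterTable", classOf[String], classOf[String])

  def alterTable(hive: FakeHive, tableName: String, table: String): Unit =
    alterTableMethod.invoke(hive, tableName, table)

  def main(args: Array[String]): Unit =
    alterTable(new FakeHive, "events", "new-definition")
}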
