diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
index 85c198290542..76a9e62d804c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/joins/HashedRelation.scala
@@ -143,9 +143,8 @@ private[execution] object HashedRelation {
       new TaskMemoryManager(
         new UnifiedMemoryManager(
           new SparkConf().set(MEMORY_OFFHEAP_ENABLED.key, "false"),
-          Long.MaxValue,
-          Long.MaxValue / 2,
-          1),
+          Runtime.getRuntime.maxMemory,
+          Runtime.getRuntime.maxMemory / 2, 1),
         0)
     }
 
@@ -401,9 +400,8 @@ private[joins] class UnsafeHashedRelation(
     val taskMemoryManager = new TaskMemoryManager(
       new UnifiedMemoryManager(
         new SparkConf().set(MEMORY_OFFHEAP_ENABLED.key, "false"),
-        Long.MaxValue,
-        Long.MaxValue / 2,
-        1),
+        Runtime.getRuntime.maxMemory,
+        Runtime.getRuntime.maxMemory / 2, 1),
       0)
 
     val pageSizeBytes = Option(SparkEnv.get).map(_.memoryManager.pageSizeBytes)
@@ -576,9 +574,8 @@ private[execution] final class LongToUnsafeRowMap(
       new TaskMemoryManager(
         new UnifiedMemoryManager(
           new SparkConf().set(MEMORY_OFFHEAP_ENABLED.key, "false"),
-          Long.MaxValue,
-          Long.MaxValue / 2,
-          1),
+          Runtime.getRuntime.maxMemory,
+          Runtime.getRuntime.maxMemory / 2, 1),
         0),
       0)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
index b88a76bbfb57..d4743afbaa93 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/joins/HashedRelationSuite.scala
@@ -27,7 +27,7 @@ import org.apache.spark.SparkConf
 import org.apache.spark.SparkException
 import org.apache.spark.internal.config._
 import org.apache.spark.internal.config.Kryo._
-import org.apache.spark.memory.{TaskMemoryManager, UnifiedMemoryManager}
+import org.apache.spark.memory.{SparkOutOfMemoryError, TaskMemoryManager, UnifiedMemoryManager}
 import org.apache.spark.serializer.KryoSerializer
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions._
@@ -42,9 +42,8 @@ import org.apache.spark.util.collection.CompactBuffer
 class HashedRelationSuite extends SharedSparkSession {
   val umm = new UnifiedMemoryManager(
     new SparkConf().set(MEMORY_OFFHEAP_ENABLED.key, "false"),
-    Long.MaxValue,
-    Long.MaxValue / 2,
-    1)
+    Runtime.getRuntime.maxMemory,
+    Runtime.getRuntime.maxMemory / 2, 1)
   val mm = new TaskMemoryManager(umm, 0)
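
The change replaces the unbounded Long.MaxValue caps with the JVM's actual heap limit in every place where HashedRelation constructs a fallback TaskMemoryManager of its own (for example on deserialization paths, where no task-provided manager is available). With Long.MaxValue, the UnifiedMemoryManager would grant acquisitions far beyond the real heap, so an oversized build could exhaust the JVM with a raw java.lang.OutOfMemoryError; bounded by Runtime.getRuntime.maxMemory, the acquisition fails early with a catchable SparkOutOfMemoryError, which is presumably why the test suite now imports that class. Below is a minimal sketch of the bounded fallback manager. The object name and the println are illustrative only, and since UnifiedMemoryManager is private[spark] the sketch assumes it is compiled inside an org.apache.spark.* package, as the patched file is; the constructor parameter names in the comments are assumptions from the Spark 3.x sources.

    package org.apache.spark.sql.execution.joins

    import org.apache.spark.SparkConf
    import org.apache.spark.internal.config.MEMORY_OFFHEAP_ENABLED
    import org.apache.spark.memory.{TaskMemoryManager, UnifiedMemoryManager}

    // Illustrative only: mirrors the fallback manager as built after the patch.
    object BoundedFallbackManagerSketch {
      def main(args: Array[String]): Unit = {
        // The real heap cap of this JVM (e.g. whatever -Xmx was set to),
        // instead of the old, physically meaningless Long.MaxValue.
        val heap = Runtime.getRuntime.maxMemory

        // Execution + storage can now never exceed the actual heap, so a
        // too-large hashed relation fails its memory acquisition with a
        // catchable SparkOutOfMemoryError instead of killing the JVM.
        val umm = new UnifiedMemoryManager(
          new SparkConf().set(MEMORY_OFFHEAP_ENABLED.key, "false"),
          heap,      // maxHeapMemory: total on-heap budget
          heap / 2,  // onHeapStorageRegionSize: half reserved for storage
          1)         // numCores
        val tmm = new TaskMemoryManager(umm, 0)

        println(s"fallback manager capped at $heap bytes (was Long.MaxValue)")
      }
    }

Note that only the absolute bound changes: reserving half of the cap for the storage region mirrors the previous Long.MaxValue / 2 split, so the execution/storage balance of the fallback manager is untouched.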