
Running a Spark program fails with: GC overhead limit exceeded

Bounty: 50 points [Solved] Resolved on 2020-04-13 10:12

1. Problem description

The reported error is:

Driver stacktrace:
20/04/13 08:20:54 INFO DAGScheduler: Job 8 failed: aggregate at MatrixFactorizationModel.scala:97, took 67.185968 s
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 67.0 failed 1 times, most recent failure: Lost task 0.0 in stage 67.0 (TID 128, localhost, executor driver): java.lang.OutOfMemoryError: GC overhead limit exceeded

The code that triggers the error:

// Compute predicted ratings from the trained model
val preRatings = model.predict(userMovies)
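
For context: in MLlib's ALS, model.predict here takes an RDD of (user, movie) ID pairs, and userMovies is presumably the cartesian product of all user IDs and all movie IDs, which can be very large. A sketch of the surrounding code under that assumption (userRDD and movieRDD are placeholder names, not taken from the actual project):

// Assumed construction of userMovies: every (user, movie) pair.
// userRDD: RDD[Int] of all user IDs, movieRDD: RDD[Int] of all movie IDs (placeholder names).
val userMovies = userRDD.cartesian(movieRDD)

// Score every (user, movie) pair with the trained ALS model
val preRatings = model.predict(userMovies)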

2. What I have tried

Following suggestions found via Baidu, I increased the JVM heap size, but that did not solve the problem.
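
For reference, raising memory for a job like this usually looks roughly like the sketch below; the values are placeholders, not the exact settings tried here:

import org.apache.spark.{SparkConf, SparkContext}

// Sketch of raising memory for a local-mode job; the values are placeholders.
val conf = new SparkConf()
  .setMaster("local[*]")
  .setAppName("OfflineRecommender")
  .set("spark.executor.memory", "4g")   // executor heap; in local mode tasks actually run in the driver JVM

// spark.driver.memory generally cannot be raised from code in client/local mode,
// because the driver JVM has already started; pass it on the command line instead
// (app.jar is a placeholder):
// spark-submit --driver-memory 4g --executor-memory 4g --class com.bysj.offline.OfflineRecommender app.jar
val sc = new SparkContext(conf)

When the job is launched from an IDE, the driver heap can also be raised with a -Xmx VM option.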

3. Full error log

20/04/13 08:20:54 WARN NettyRpcEndpointRef: Error sending message [message = Heartbeat(driver,[Lscala.Tuple2;@1805cb5a,BlockManagerId(driver, 192.168.198.1, 51377, None))] in 1 attempts
org.apache.spark.rpc.RpcTimeoutException: Cannot receive any reply in 10 seconds. This timeout is controlled by spark.executor.heartbeatInterval
	at org.apache.spark.rpc.RpcTimeout.org$apache$spark$rpc$RpcTimeout$$createRpcTimeoutException(RpcTimeout.scala:48)
	at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:63)
	at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
	at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:36)
	at scala.util.Failure$$anonfun$recover$1.apply(Try.scala:216)
	at scala.util.Try$.apply(Try.scala:192)
	at scala.util.Failure.recover(Try.scala:216)
	at scala.concurrent.Future$$anonfun$recover$1.apply(Future.scala:326)
	at scala.concurrent.Future$$anonfun$recover$1.apply(Future.scala:326)
	at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:32)
	at org.spark_project.guava.util.concurrent.MoreExecutors$SameThreadExecutorService.execute(MoreExecutors.java:293)
	at scala.concurrent.impl.ExecutionContextImpl$$anon$1.execute(ExecutionContextImpl.scala:136)
	at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:40)
	at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:248)
	at scala.concurrent.Promise$class.complete(Promise.scala:55)
	at scala.concurrent.impl.Promise$DefaultPromise.complete(Promise.scala:153)
	at scala.concurrent.Future$$anonfun$map$1.apply(Future.scala:237)
	at scala.concurrent.Future$$anonfun$map$1.apply(Future.scala:237)
	at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:32)
	at scala.concurrent.BatchingExecutor$Batch$$anonfun$run$1.processBatch$1(BatchingExecutor.scala:63)
	at scala.concurrent.BatchingExecutor$Batch$$anonfun$run$1.apply$mcV$sp(BatchingExecutor.scala:78)
	at scala.concurrent.BatchingExecutor$Batch$$anonfun$run$1.apply(BatchingExecutor.scala:55)
	at scala.concurrent.BatchingExecutor$Batch$$anonfun$run$1.apply(BatchingExecutor.scala:55)
	at scala.concurrent.BlockContext$.withBlockContext(BlockContext.scala:72)
	at scala.concurrent.BatchingExecutor$Batch.run(BatchingExecutor.scala:54)
	at scala.concurrent.Future$InternalCallbackExecutor$.unbatchedExecute(Future.scala:601)
	at scala.concurrent.BatchingExecutor$class.execute(BatchingExecutor.scala:106)
	at scala.concurrent.Future$InternalCallbackExecutor$.execute(Future.scala:599)
	at scala.concurrent.impl.CallbackRunnable.executeWithValue(Promise.scala:40)
	at scala.concurrent.impl.Promise$DefaultPromise.tryComplete(Promise.scala:248)
	at scala.concurrent.Promise$class.tryFailure(Promise.scala:112)
	at scala.concurrent.impl.Promise$DefaultPromise.tryFailure(Promise.scala:153)
	at org.apache.spark.rpc.netty.NettyRpcEnv.org$apache$spark$rpc$netty$NettyRpcEnv$$onFailure$1(NettyRpcEnv.scala:205)
	at org.apache.spark.rpc.netty.NettyRpcEnv$$anon$1.run(NettyRpcEnv.scala:239)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: java.util.concurrent.TimeoutException: Cannot receive any reply in 10 seconds
	... 8 more
20/04/13 08:20:54 ERROR Executor: Exception in task 0.0 in stage 67.0 (TID 128)
java.lang.OutOfMemoryError: GC overhead limit exceeded
	at java.util.Arrays.copyOf(Arrays.java:3332)
	at java.lang.StringCoding.safeTrim(StringCoding.java:89)
	at java.lang.StringCoding.decode(StringCoding.java:230)
	at java.lang.String.<init>(String.java:463)
	at org.bson.io.ByteBufferBsonInput.readString(ByteBufferBsonInput.java:115)
	at org.bson.BsonBinaryReader.doReadString(BsonBinaryReader.java:226)
	at org.bson.AbstractBsonReader.readString(AbstractBsonReader.java:430)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:31)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:28)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.operation.CommandResultArrayCodec.decode(CommandResultArrayCodec.java:48)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:53)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at org.bson.codecs.configuration.LazyCodec.decode(LazyCodec.java:47)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:56)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.connection.ReplyMessage.<init>(ReplyMessage.java:57)
	at com.mongodb.connection.CommandProtocol.getResponseDocument(CommandProtocol.java:149)
	at com.mongodb.connection.CommandProtocol.execute(CommandProtocol.java:118)
	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
	at com.mongodb.connection.DefaultServerConnection.command(DefaultServerConnection.java:173)
	at com.mongodb.operation.QueryBatchCursor.getMore(QueryBatchCursor.java:209)
	at com.mongodb.operation.QueryBatchCursor.hasNext(QueryBatchCursor.java:103)
	at com.mongodb.MongoBatchCursorAdapter.hasNext(MongoBatchCursorAdapter.java:46)
	at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
20/04/13 08:20:54 ERROR SparkUncaughtExceptionHandler: Uncaught exception in thread Thread[Executor task launch worker for task 128,5,main]
java.lang.OutOfMemoryError: GC overhead limit exceeded
	at java.util.Arrays.copyOf(Arrays.java:3332)
	at java.lang.StringCoding.safeTrim(StringCoding.java:89)
	at java.lang.StringCoding.decode(StringCoding.java:230)
	at java.lang.String.<init>(String.java:463)
	at org.bson.io.ByteBufferBsonInput.readString(ByteBufferBsonInput.java:115)
	at org.bson.BsonBinaryReader.doReadString(BsonBinaryReader.java:226)
	at org.bson.AbstractBsonReader.readString(AbstractBsonReader.java:430)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:31)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:28)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.operation.CommandResultArrayCodec.decode(CommandResultArrayCodec.java:48)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:53)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at org.bson.codecs.configuration.LazyCodec.decode(LazyCodec.java:47)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:56)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.connection.ReplyMessage.<init>(ReplyMessage.java:57)
	at com.mongodb.connection.CommandProtocol.getResponseDocument(CommandProtocol.java:149)
	at com.mongodb.connection.CommandProtocol.execute(CommandProtocol.java:118)
	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
	at com.mongodb.connection.DefaultServerConnection.command(DefaultServerConnection.java:173)
	at com.mongodb.operation.QueryBatchCursor.getMore(QueryBatchCursor.java:209)
	at com.mongodb.operation.QueryBatchCursor.hasNext(QueryBatchCursor.java:103)
	at com.mongodb.MongoBatchCursorAdapter.hasNext(MongoBatchCursorAdapter.java:46)
	at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
20/04/13 08:20:54 INFO SparkContext: Invoking stop() from shutdown hook
20/04/13 08:20:54 WARN TaskSetManager: Lost task 0.0 in stage 67.0 (TID 128, localhost, executor driver): java.lang.OutOfMemoryError: GC overhead limit exceeded
	at java.util.Arrays.copyOf(Arrays.java:3332)
	at java.lang.StringCoding.safeTrim(StringCoding.java:89)
	at java.lang.StringCoding.decode(StringCoding.java:230)
	at java.lang.String.<init>(String.java:463)
	at org.bson.io.ByteBufferBsonInput.readString(ByteBufferBsonInput.java:115)
	at org.bson.BsonBinaryReader.doReadString(BsonBinaryReader.java:226)
	at org.bson.AbstractBsonReader.readString(AbstractBsonReader.java:430)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:31)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:28)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.operation.CommandResultArrayCodec.decode(CommandResultArrayCodec.java:48)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:53)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at org.bson.codecs.configuration.LazyCodec.decode(LazyCodec.java:47)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:56)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.connection.ReplyMessage.<init>(ReplyMessage.java:57)
	at com.mongodb.connection.CommandProtocol.getResponseDocument(CommandProtocol.java:149)
	at com.mongodb.connection.CommandProtocol.execute(CommandProtocol.java:118)
	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
	at com.mongodb.connection.DefaultServerConnection.command(DefaultServerConnection.java:173)
	at com.mongodb.operation.QueryBatchCursor.getMore(QueryBatchCursor.java:209)
	at com.mongodb.operation.QueryBatchCursor.hasNext(QueryBatchCursor.java:103)
	at com.mongodb.MongoBatchCursorAdapter.hasNext(MongoBatchCursorAdapter.java:46)
	at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)

20/04/13 08:20:54 INFO MongoClientCache: Closing MongoClient: [192.168.3.66:27017]
20/04/13 08:20:54 INFO connection: Closed connection [connectionId{localValue:4, serverValue:4}] to 192.168.3.66:27017 because the pool has been closed.
20/04/13 08:20:54 ERROR TaskSetManager: Task 0 in stage 67.0 failed 1 times; aborting job
20/04/13 08:20:54 INFO TaskSchedulerImpl: Removed TaskSet 67.0, whose tasks have all completed, from pool 
20/04/13 08:20:54 INFO SparkUI: Stopped Spark web UI at http://192.168.198.1:4040
20/04/13 08:20:54 INFO TaskSchedulerImpl: Cancelling stage 67

20/04/13 08:20:54 INFO DAGScheduler: ResultStage 67 (aggregate at MatrixFactorizationModel.scala:97) failed in 65.678 s due to Job aborted due to stage failure: Task 0 in stage 67.0 failed 1 times, most recent failure: Lost task 0.0 in stage 67.0 (TID 128, localhost, executor driver): java.lang.OutOfMemoryError: GC overhead limit exceeded
	at java.util.Arrays.copyOf(Arrays.java:3332)
	at java.lang.StringCoding.safeTrim(StringCoding.java:89)
	at java.lang.StringCoding.decode(StringCoding.java:230)
	at java.lang.String.<init>(String.java:463)
	at org.bson.io.ByteBufferBsonInput.readString(ByteBufferBsonInput.java:115)
	at org.bson.BsonBinaryReader.doReadString(BsonBinaryReader.java:226)
	at org.bson.AbstractBsonReader.readString(AbstractBsonReader.java:430)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:31)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:28)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.operation.CommandResultArrayCodec.decode(CommandResultArrayCodec.java:48)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:53)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at org.bson.codecs.configuration.LazyCodec.decode(LazyCodec.java:47)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:56)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.connection.ReplyMessage.<init>(ReplyMessage.java:57)
	at com.mongodb.connection.CommandProtocol.getResponseDocument(CommandProtocol.java:149)
	at com.mongodb.connection.CommandProtocol.execute(CommandProtocol.java:118)
	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
	at com.mongodb.connection.DefaultServerConnection.command(DefaultServerConnection.java:173)
	at com.mongodb.operation.QueryBatchCursor.getMore(QueryBatchCursor.java:209)
	at com.mongodb.operation.QueryBatchCursor.hasNext(QueryBatchCursor.java:103)
	at com.mongodb.MongoBatchCursorAdapter.hasNext(MongoBatchCursorAdapter.java:46)
	at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)

Driver stacktrace:
20/04/13 08:20:54 INFO DAGScheduler: Job 8 failed: aggregate at MatrixFactorizationModel.scala:97, took 67.185968 s
Exception in thread "main" org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 67.0 failed 1 times, most recent failure: Lost task 0.0 in stage 67.0 (TID 128, localhost, executor driver): java.lang.OutOfMemoryError: GC overhead limit exceeded
	at java.util.Arrays.copyOf(Arrays.java:3332)
	at java.lang.StringCoding.safeTrim(StringCoding.java:89)
	at java.lang.StringCoding.decode(StringCoding.java:230)
	at java.lang.String.<init>(String.java:463)
	at org.bson.io.ByteBufferBsonInput.readString(ByteBufferBsonInput.java:115)
	at org.bson.BsonBinaryReader.doReadString(BsonBinaryReader.java:226)
	at org.bson.AbstractBsonReader.readString(AbstractBsonReader.java:430)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:31)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:28)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.operation.CommandResultArrayCodec.decode(CommandResultArrayCodec.java:48)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:53)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at org.bson.codecs.configuration.LazyCodec.decode(LazyCodec.java:47)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:56)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.connection.ReplyMessage.<init>(ReplyMessage.java:57)
	at com.mongodb.connection.CommandProtocol.getResponseDocument(CommandProtocol.java:149)
	at com.mongodb.connection.CommandProtocol.execute(CommandProtocol.java:118)
	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
	at com.mongodb.connection.DefaultServerConnection.command(DefaultServerConnection.java:173)
	at com.mongodb.operation.QueryBatchCursor.getMore(QueryBatchCursor.java:209)
	at com.mongodb.operation.QueryBatchCursor.hasNext(QueryBatchCursor.java:103)
	at com.mongodb.MongoBatchCursorAdapter.hasNext(MongoBatchCursorAdapter.java:46)
	at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)

Driver stacktrace:
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1435)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1423)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1422)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1422)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:802)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:802)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1650)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:628)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1925)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1988)
	at org.apache.spark.rdd.RDD$$anonfun$aggregate$1.apply(RDD.scala:1115)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
	at org.apache.spark.rdd.RDD.aggregate(RDD.scala:1108)
	at org.apache.spark.mllib.recommendation.MatrixFactorizationModel.countApproxDistinctUserProduct(MatrixFactorizationModel.scala:97)
	at org.apache.spark.mllib.recommendation.MatrixFactorizationModel.predict(MatrixFactorizationModel.scala:127)
	at com.bysj.offline.OfflineRecommender$.main(OfflineRecommender.scala:138)
	at com.bysj.offline.OfflineRecommender.main(OfflineRecommender.scala)
Caused by: java.lang.OutOfMemoryError: GC overhead limit exceeded
	at java.util.Arrays.copyOf(Arrays.java:3332)
	at java.lang.StringCoding.safeTrim(StringCoding.java:89)
	at java.lang.StringCoding.decode(StringCoding.java:230)
	at java.lang.String.<init>(String.java:463)
	at org.bson.io.ByteBufferBsonInput.readString(ByteBufferBsonInput.java:115)
	at org.bson.BsonBinaryReader.doReadString(BsonBinaryReader.java:226)
	at org.bson.AbstractBsonReader.readString(AbstractBsonReader.java:430)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:31)
	at org.bson.codecs.BsonStringCodec.decode(BsonStringCodec.java:28)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.operation.CommandResultArrayCodec.decode(CommandResultArrayCodec.java:48)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:53)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at org.bson.codecs.configuration.LazyCodec.decode(LazyCodec.java:47)
	at org.bson.codecs.BsonDocumentCodec.readValue(BsonDocumentCodec.java:98)
	at com.mongodb.operation.CommandResultDocumentCodec.readValue(CommandResultDocumentCodec.java:56)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:81)
	at org.bson.codecs.BsonDocumentCodec.decode(BsonDocumentCodec.java:40)
	at com.mongodb.connection.ReplyMessage.<init>(ReplyMessage.java:57)
	at com.mongodb.connection.CommandProtocol.getResponseDocument(CommandProtocol.java:149)
	at com.mongodb.connection.CommandProtocol.execute(CommandProtocol.java:118)
	at com.mongodb.connection.DefaultServer$DefaultServerProtocolExecutor.execute(DefaultServer.java:159)
	at com.mongodb.connection.DefaultServerConnection.executeProtocol(DefaultServerConnection.java:286)
	at com.mongodb.connection.DefaultServerConnection.command(DefaultServerConnection.java:173)
	at com.mongodb.operation.QueryBatchCursor.getMore(QueryBatchCursor.java:209)
	at com.mongodb.operation.QueryBatchCursor.hasNext(QueryBatchCursor.java:103)
	at com.mongodb.MongoBatchCursorAdapter.hasNext(MongoBatchCursorAdapter.java:46)
	at scala.collection.convert.Wrappers$JIteratorWrapper.hasNext(Wrappers.scala:42)
	at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
20/04/13 08:20:58 WARN NettyRpcEndpointRef: Error sending message [message = Heartbeat(driver,[Lscala.Tuple2;@1805cb5a,BlockManagerId(driver, 192.168.198.1, 51377, None))] in 2 attempts
org.apache.spark.SparkException: Exception thrown in awaitResult
	at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:77)
	at org.apache.spark.rpc.RpcTimeout$$anonfun$1.applyOrElse(RpcTimeout.scala:75)
	at scala.runtime.AbstractPartialFunction.apply(AbstractPartialFunction.scala:36)
	at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
	at org.apache.spark.rpc.RpcTimeout$$anonfun$addMessageIfTimeout$1.applyOrElse(RpcTimeout.scala:59)
	at scala.PartialFunction$OrElse.apply(PartialFunction.scala:167)
	at org.apache.spark.rpc.RpcTimeout.awaitResult(RpcTimeout.scala:83)
	at org.apache.spark.rpc.RpcEndpointRef.askWithRetry(RpcEndpointRef.scala:102)
	at org.apache.spark.executor.Executor.org$apache$spark$executor$Executor$$reportHeartBeat(Executor.scala:689)
	at org.apache.spark.executor.Executor$$anon$1$$anonfun$run$1.apply$mcV$sp(Executor.scala:718)
	at org.apache.spark.executor.Executor$$anon$1$$anonfun$run$1.apply(Executor.scala:718)
	at org.apache.spark.executor.Executor$$anon$1$$anonfun$run$1.apply(Executor.scala:718)
	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1951)
	at org.apache.spark.executor.Executor$$anon$1.run(Executor.scala:718)
	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
	at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
	at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.SparkException: Could not find HeartbeatReceiver.
	at org.apache.spark.rpc.netty.Dispatcher.postMessage(Dispatcher.scala:154)
	at org.apache.spark.rpc.netty.Dispatcher.postLocalMessage(Dispatcher.scala:129)
	at org.apache.spark.rpc.netty.NettyRpcEnv.ask(NettyRpcEnv.scala:225)
	at org.apache.spark.rpc.netty.NettyRpcEndpointRef.ask(NettyRpcEnv.scala:507)
	at org.apache.spark.rpc.RpcEndpointRef.askWithRetry(RpcEndpointRef.scala:101)
	... 13 more

狮子对你微笑 | Beginner Level 1 | Reputation: 2
Asked: 2020-04-13 08:36

If moderately increasing memory still doesn't help, consider tuning the Spark configuration parameters.

。淑女范erり 4 years ago

@。淑女范erり: Still doesn't work.

想对你微笑 4 years ago
Best answer

Add .cache() to the source RDD that gets reused, so it is cached instead of being recomputed over and over. The whole computation becomes more efficient and the out-of-memory problem no longer occurs.
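
A minimal sketch of what that looks like, assuming the ratings are loaded from MongoDB into an RDD before training; ratingRDD, userRDD, movieRDD and the field names are placeholders, not the project's actual identifiers:

import org.apache.spark.mllib.recommendation.{ALS, Rating}

// Cache the RDDs that are reused by both training and predict,
// so Spark does not re-read MongoDB and recompute them for every job.
// ratingRDD / userRDD / movieRDD and the field names below are placeholders.
val trainData = ratingRDD.map(r => Rating(r.uid, r.mid, r.score)).cache()
val model = ALS.train(trainData, 50, 10, 0.01)        // rank, iterations, lambda

val userMovies = userRDD.cartesian(movieRDD).cache()  // the (user, movie) pairs fed to predict
val preRatings = model.predict(userMovies)

If memory is tight, calling .unpersist() on the cached RDDs after predict frees the cached blocks again.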

狮子对你微笑 | Beginner Level 1 | Reputation: 2 | 2020-04-13 10:11