[Spark] Executors have a minimum memory requirement.

Woong · July 1, 2022
  • 471859200 bytes = 450 MiB. Executor memory must be set to at least this value; in the error below only 268435456 bytes (256 MiB) had been allocated. A sketch of where the 450 MiB floor comes from follows, and a fix is sketched after the log.
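
For reference, a minimal sketch of where the 471859200 figure appears to come from. Based on UnifiedMemoryManager.getMaxMemory (the top frame in the stack trace below), Spark reserves 300 MiB of system memory and requires at least 1.5 times that amount; the constant name and factor mirror the Spark source, but the standalone object is illustrative only.

```scala
// Rough derivation of Spark's minimum executor memory.
// The 300 MiB reserve and the 1.5x factor follow
// UnifiedMemoryManager.getMaxMemory; the object itself is just a demo.
object MinExecutorMemory {
  def main(args: Array[String]): Unit = {
    val reservedSystemMemoryBytes = 300L * 1024 * 1024            // 300 MiB reserved by Spark
    val minSystemMemory = (reservedSystemMemoryBytes * 1.5).ceil.toLong
    println(minSystemMemory)                                      // 471859200 bytes = 450 MiB
  }
}
```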
java.lang.IllegalArgumentException: Executor memory 268435456 must be at least 471859200. Please increase executor memory using the --executor-memory option or spark.executor.memory in Spark configuration.
        at org.apache.spark.memory.UnifiedMemoryManager$.getMaxMemory(UnifiedMemoryManager.scala:229)
        at org.apache.spark.memory.UnifiedMemoryManager$.apply(UnifiedMemoryManager.scala:201)
        at org.apache.spark.SparkEnv$.create(SparkEnv.scala:340)
        at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
        at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
        at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2555)
        at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$1(SparkSession.scala:930)
        at scala.Option.getOrElse(Option.scala:189)
        at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:921)
        at com.wins.bd1.ai.stats.Stats$.<init>(Stats.scala:46)
        at com.wins.bd1.ai.stats.Stats$.<clinit>(Stats.scala)
        at com.wins.bd1.ai.stats.Stats.main(Stats.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
22/07/01 15:23:26 INFO SparkContext: Successfully stopped SparkContext
Exception in thread "main" java.lang.ExceptionInInitializerError
        at com.wins.bd1.ai.stats.Stats.main(Stats.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
        at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:928)
        at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:180)
        at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:203)
        at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:90)
        at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1007)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1016)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.IllegalArgumentException: Executor memory 268435456 must be at least 471859200. Please increase executor memory using the --executor-memory option or spark.executor.memory in Spark configuration.
        at org.apache.spark.memory.UnifiedMemoryManager$.getMaxMemory(UnifiedMemoryManager.scala:229)
        at org.apache.spark.memory.UnifiedMemoryManager$.apply(UnifiedMemoryManager.scala:201)
        at org.apache.spark.SparkEnv$.create(SparkEnv.scala:340)
        at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:189)
        at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:267)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:442)
        at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2555)
        at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$1(SparkSession.scala:930)
        at scala.Option.getOrElse(Option.scala:189)
        at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:921)
        at com.wins.bd1.ai.stats.Stats$.<init>(Stats.scala:46)
        at com.wins.bd1.ai.stats.Stats$.<clinit>(Stats.scala)
        ... 13 more
22/07/01 15:23:26 INFO ShutdownHookManager: Shutdown hook called
22/07/01 15:23:26 INFO ShutdownHookManager: Deleting directory /tmp/spark-4f401978-abd2-4a98-8833-e44b028c97d0
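
The fix is simply to raise executor memory above the 450 MiB floor, either with spark-submit --executor-memory 1g or via spark.executor.memory, as the exception message suggests. Below is a minimal sketch of the config-based approach; the object name StatsApp and the 1g value are illustrative, not the actual Stats job.

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch: build the session with executor memory above the 450 MiB minimum.
// Alternatively, pass --executor-memory 1g to spark-submit instead of setting it here.
object StatsApp {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("Stats")
      .config("spark.executor.memory", "1g")   // >= 471859200 bytes, so the check passes
      .getOrCreate()

    // ... actual job logic goes here ...

    spark.stop()
  }
}
```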
